@platforma-open/milaboratories.vj-usage.model 2.1.4 → 2.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +13 -28
- package/.turbo/turbo-lint.log +5 -0
- package/.turbo/turbo-type-check.log +6 -0
- package/CHANGELOG.md +12 -0
- package/dist/bundle.js +8252 -6953
- package/dist/bundle.js.map +1 -1
- package/dist/index.cjs +82 -73
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +22 -18
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +82 -74
- package/dist/index.js.map +1 -1
- package/dist/label.cjs +19 -0
- package/dist/label.cjs.map +1 -0
- package/dist/label.d.ts +7 -0
- package/dist/label.d.ts.map +1 -0
- package/dist/label.js +17 -0
- package/dist/label.js.map +1 -0
- package/dist/model.json +1 -1
- package/package.json +24 -27
- package/src/index.ts +15 -5
- package/src/label.ts +23 -0
- package/tsconfig.json +2 -14
- package/vitest.config.mts +8 -0
- package/dist/index.d.cts +0 -55
- package/vite.config.mts +0 -20
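Most of the changed lines in this release come from the regenerated dist/model.json shown below. As an illustrative aid only, here is a minimal Node/TypeScript sketch (assuming a Node environment and the package layout listed above; the file path and field accesses simply mirror keys visible in the diffed JSON and are not part of the package's published API) for loading that file and reading a few of its top-level fields:

// Hypothetical inspection script; field names taken from the diff content below.
import { readFileSync } from "node:fs";

const model = JSON.parse(readFileSync("package/dist/model.json", "utf8"));

// Top-level fields that appear in the diffed file.
console.log(model.sdkVersion);       // e.g. "1.45.0"
console.log(model.renderingMode);    // e.g. "Heavy"
console.log(model.initialArgs);      // e.g. { scChain: "A", allele: false }
console.log(model.v3?.featureFlags); // e.g. { supportsLazyState: true, ... }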
package/dist/model.json
CHANGED
@@ -1 +1 @@
-
{"v3":{"sdkVersion":"1.45.0","renderingMode":"Heavy","initialArgs":{"scChain":"A","allele":false},"initialUiState":{"blockTitle":"V/J Usage","weightedFlag":true,"vUsagePlotState":{"title":"V Usage","template":"heatmapClustered","currentTab":"settings","layersSettings":{"heatmapClustered":{"normalizationDirection":null}}},"jUsagePlotState":{"title":"V Usage","template":"heatmapClustered","currentTab":null,"layersSettings":{"heatmapClustered":{"normalizationDirection":null}}},"vjUsagePlotState":{"title":"V/J Usage","template":"heatmapClustered","currentTab":null,"layersSettings":{"heatmapClustered":{"normalizationDirection":null}}}},"inputsValid":{"__renderLambda":true,"handle":"inputsValid"},"sections":{"__renderLambda":true,"handle":"sections"},"title":{"__renderLambda":true,"handle":"title"},"outputs":{"datasetOptions":{"__renderLambda":true,"handle":"output#datasetOptions"},"datasetSpec":{"__renderLambda":true,"handle":"output#datasetSpec"},"pf":{"__renderLambda":true,"handle":"output#pf"},"isRunning":{"__renderLambda":true,"handle":"output#isRunning"}},"featureFlags":{"supportsLazyState":true,"requiresUIAPIVersion":2,"requiresModelAPIVersion":1}},"sdkVersion":"1.45.0","renderingMode":"Heavy","initialArgs":{"scChain":"A","allele":false},"inputsValid":"inputsValid","sections":"sections","outputs":{"datasetOptions":"output#datasetOptions","datasetSpec":"output#datasetSpec","pf":"output#pf","isRunning":"output#isRunning"},"code":{"type":"plain","content":"(function(global, factory) {\n typeof exports === \"object\" && typeof module !== \"undefined\" ? factory(exports) : typeof define === \"function\" && define.amd ? define([\"exports\"], factory) : (global = typeof globalThis !== \"undefined\" ? globalThis : global || self, factory(global.model = {}));\n})(this, function(exports2) {\n \"use strict\";var __defProp = Object.defineProperty;\nvar __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;\nvar __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== \"symbol\" ? 
key + \"\" : key, value);\n\n function getImmediate(value) {\n return { type: \"Immediate\", value };\n }\n function isInUI() {\n return typeof globalThis.getPlatforma !== \"undefined\" || typeof globalThis.platforma !== \"undefined\";\n }\n function getPlatformaInstance(config) {\n if (config && typeof globalThis.getPlatforma === \"function\")\n return globalThis.getPlatforma(config);\n else if (typeof globalThis.platforma !== \"undefined\")\n return globalThis.platforma;\n else\n throw new Error(\"Can't get platforma instance.\");\n }\n function tryGetCfgRenderCtx() {\n if (typeof globalThis.cfgRenderCtx !== \"undefined\")\n return globalThis.cfgRenderCtx;\n else\n return void 0;\n }\n function getCfgRenderCtx() {\n if (typeof globalThis.cfgRenderCtx !== \"undefined\")\n return globalThis.cfgRenderCtx;\n else\n throw new Error(\"Not in config rendering context\");\n }\n function tryRegisterCallback(key, callback) {\n const ctx = tryGetCfgRenderCtx();\n if (ctx === void 0)\n return false;\n if (key in ctx.callbackRegistry)\n throw new Error(`Callback with key ${key} already registered.`);\n ctx.callbackRegistry[key] = callback;\n return true;\n }\n const futureResolves = /* @__PURE__ */ new Map();\n function registerFutureAwait(handle, onResolve) {\n if (!(handle in getCfgRenderCtx().callbackRegistry)) {\n getCfgRenderCtx().callbackRegistry[handle] = (value) => {\n for (const res of futureResolves.get(handle)) {\n res(value);\n }\n };\n futureResolves.set(handle, []);\n }\n futureResolves.get(handle).push(onResolve);\n }\n var util;\n (function(util2) {\n util2.assertEqual = (val) => val;\n function assertIs(_arg) {\n }\n util2.assertIs = assertIs;\n function assertNever2(_x) {\n throw new Error();\n }\n util2.assertNever = assertNever2;\n util2.arrayToEnum = (items) => {\n const obj = {};\n for (const item of items) {\n obj[item] = item;\n }\n return obj;\n };\n util2.getValidEnumValues = (obj) => {\n const validKeys = util2.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== \"number\");\n const filtered = {};\n for (const k of validKeys) {\n filtered[k] = obj[k];\n }\n return util2.objectValues(filtered);\n };\n util2.objectValues = (obj) => {\n return util2.objectKeys(obj).map(function(e) {\n return obj[e];\n });\n };\n util2.objectKeys = typeof Object.keys === \"function\" ? (obj) => Object.keys(obj) : (object) => {\n const keys = [];\n for (const key in object) {\n if (Object.prototype.hasOwnProperty.call(object, key)) {\n keys.push(key);\n }\n }\n return keys;\n };\n util2.find = (arr, checker) => {\n for (const item of arr) {\n if (checker(item))\n return item;\n }\n return void 0;\n };\n util2.isInteger = typeof Number.isInteger === \"function\" ? (val) => Number.isInteger(val) : (val) => typeof val === \"number\" && isFinite(val) && Math.floor(val) === val;\n function joinValues(array, separator = \" | \") {\n return array.map((val) => typeof val === \"string\" ? 
`'${val}'` : val).join(separator);\n }\n util2.joinValues = joinValues;\n util2.jsonStringifyReplacer = (_, value) => {\n if (typeof value === \"bigint\") {\n return value.toString();\n }\n return value;\n };\n })(util || (util = {}));\n var objectUtil;\n (function(objectUtil2) {\n objectUtil2.mergeShapes = (first, second) => {\n return {\n ...first,\n ...second\n // second overwrites first\n };\n };\n })(objectUtil || (objectUtil = {}));\n const ZodParsedType = util.arrayToEnum([\n \"string\",\n \"nan\",\n \"number\",\n \"integer\",\n \"float\",\n \"boolean\",\n \"date\",\n \"bigint\",\n \"symbol\",\n \"function\",\n \"undefined\",\n \"null\",\n \"array\",\n \"object\",\n \"unknown\",\n \"promise\",\n \"void\",\n \"never\",\n \"map\",\n \"set\"\n ]);\n const getParsedType = (data) => {\n const t = typeof data;\n switch (t) {\n case \"undefined\":\n return ZodParsedType.undefined;\n case \"string\":\n return ZodParsedType.string;\n case \"number\":\n return isNaN(data) ? ZodParsedType.nan : ZodParsedType.number;\n case \"boolean\":\n return ZodParsedType.boolean;\n case \"function\":\n return ZodParsedType.function;\n case \"bigint\":\n return ZodParsedType.bigint;\n case \"symbol\":\n return ZodParsedType.symbol;\n case \"object\":\n if (Array.isArray(data)) {\n return ZodParsedType.array;\n }\n if (data === null) {\n return ZodParsedType.null;\n }\n if (data.then && typeof data.then === \"function\" && data.catch && typeof data.catch === \"function\") {\n return ZodParsedType.promise;\n }\n if (typeof Map !== \"undefined\" && data instanceof Map) {\n return ZodParsedType.map;\n }\n if (typeof Set !== \"undefined\" && data instanceof Set) {\n return ZodParsedType.set;\n }\n if (typeof Date !== \"undefined\" && data instanceof Date) {\n return ZodParsedType.date;\n }\n return ZodParsedType.object;\n default:\n return ZodParsedType.unknown;\n }\n };\n const ZodIssueCode = util.arrayToEnum([\n \"invalid_type\",\n \"invalid_literal\",\n \"custom\",\n \"invalid_union\",\n \"invalid_union_discriminator\",\n \"invalid_enum_value\",\n \"unrecognized_keys\",\n \"invalid_arguments\",\n \"invalid_return_type\",\n \"invalid_date\",\n \"invalid_string\",\n \"too_small\",\n \"too_big\",\n \"invalid_intersection_types\",\n \"not_multiple_of\",\n \"not_finite\"\n ]);\n const quotelessJson = (obj) => {\n const json = JSON.stringify(obj, null, 2);\n return json.replace(/\"([^\"]+)\":/g, \"$1:\");\n };\n class ZodError extends Error {\n constructor(issues) {\n super();\n this.issues = [];\n this.addIssue = (sub) => {\n this.issues = [...this.issues, sub];\n };\n this.addIssues = (subs = []) => {\n this.issues = [...this.issues, ...subs];\n };\n const actualProto = new.target.prototype;\n if (Object.setPrototypeOf) {\n Object.setPrototypeOf(this, actualProto);\n } else {\n this.__proto__ = actualProto;\n }\n this.name = \"ZodError\";\n this.issues = issues;\n }\n get errors() {\n return this.issues;\n }\n format(_mapper) {\n const mapper = _mapper || function(issue) {\n return issue.message;\n };\n const fieldErrors = { _errors: [] };\n const processError = (error) => {\n for (const issue of error.issues) {\n if (issue.code === \"invalid_union\") {\n issue.unionErrors.map(processError);\n } else if (issue.code === \"invalid_return_type\") {\n processError(issue.returnTypeError);\n } else if (issue.code === \"invalid_arguments\") {\n processError(issue.argumentsError);\n } else if (issue.path.length === 0) {\n fieldErrors._errors.push(mapper(issue));\n } else {\n let curr = fieldErrors;\n let i = 0;\n while 
(i < issue.path.length) {\n const el = issue.path[i];\n const terminal = i === issue.path.length - 1;\n if (!terminal) {\n curr[el] = curr[el] || { _errors: [] };\n } else {\n curr[el] = curr[el] || { _errors: [] };\n curr[el]._errors.push(mapper(issue));\n }\n curr = curr[el];\n i++;\n }\n }\n }\n };\n processError(this);\n return fieldErrors;\n }\n static assert(value) {\n if (!(value instanceof ZodError)) {\n throw new Error(`Not a ZodError: ${value}`);\n }\n }\n toString() {\n return this.message;\n }\n get message() {\n return JSON.stringify(this.issues, util.jsonStringifyReplacer, 2);\n }\n get isEmpty() {\n return this.issues.length === 0;\n }\n flatten(mapper = (issue) => issue.message) {\n const fieldErrors = {};\n const formErrors = [];\n for (const sub of this.issues) {\n if (sub.path.length > 0) {\n fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || [];\n fieldErrors[sub.path[0]].push(mapper(sub));\n } else {\n formErrors.push(mapper(sub));\n }\n }\n return { formErrors, fieldErrors };\n }\n get formErrors() {\n return this.flatten();\n }\n }\n ZodError.create = (issues) => {\n const error = new ZodError(issues);\n return error;\n };\n const errorMap = (issue, _ctx) => {\n let message;\n switch (issue.code) {\n case ZodIssueCode.invalid_type:\n if (issue.received === ZodParsedType.undefined) {\n message = \"Required\";\n } else {\n message = `Expected ${issue.expected}, received ${issue.received}`;\n }\n break;\n case ZodIssueCode.invalid_literal:\n message = `Invalid literal value, expected ${JSON.stringify(issue.expected, util.jsonStringifyReplacer)}`;\n break;\n case ZodIssueCode.unrecognized_keys:\n message = `Unrecognized key(s) in object: ${util.joinValues(issue.keys, \", \")}`;\n break;\n case ZodIssueCode.invalid_union:\n message = `Invalid input`;\n break;\n case ZodIssueCode.invalid_union_discriminator:\n message = `Invalid discriminator value. Expected ${util.joinValues(issue.options)}`;\n break;\n case ZodIssueCode.invalid_enum_value:\n message = `Invalid enum value. Expected ${util.joinValues(issue.options)}, received '${issue.received}'`;\n break;\n case ZodIssueCode.invalid_arguments:\n message = `Invalid function arguments`;\n break;\n case ZodIssueCode.invalid_return_type:\n message = `Invalid function return type`;\n break;\n case ZodIssueCode.invalid_date:\n message = `Invalid date`;\n break;\n case ZodIssueCode.invalid_string:\n if (typeof issue.validation === \"object\") {\n if (\"includes\" in issue.validation) {\n message = `Invalid input: must include \"${issue.validation.includes}\"`;\n if (typeof issue.validation.position === \"number\") {\n message = `${message} at one or more positions greater than or equal to ${issue.validation.position}`;\n }\n } else if (\"startsWith\" in issue.validation) {\n message = `Invalid input: must start with \"${issue.validation.startsWith}\"`;\n } else if (\"endsWith\" in issue.validation) {\n message = `Invalid input: must end with \"${issue.validation.endsWith}\"`;\n } else {\n util.assertNever(issue.validation);\n }\n } else if (issue.validation !== \"regex\") {\n message = `Invalid ${issue.validation}`;\n } else {\n message = \"Invalid\";\n }\n break;\n case ZodIssueCode.too_small:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.exact ? \"exactly\" : issue.inclusive ? `at least` : `more than`} ${issue.minimum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.exact ? \"exactly\" : issue.inclusive ? 
`at least` : `over`} ${issue.minimum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be ${issue.exact ? `exactly equal to ` : issue.inclusive ? `greater than or equal to ` : `greater than `}${issue.minimum}`;\n else if (issue.type === \"date\")\n message = `Date must be ${issue.exact ? `exactly equal to ` : issue.inclusive ? `greater than or equal to ` : `greater than `}${new Date(Number(issue.minimum))}`;\n else\n message = \"Invalid input\";\n break;\n case ZodIssueCode.too_big:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be ${issue.exact ? `exactly` : issue.inclusive ? `less than or equal to` : `less than`} ${issue.maximum}`;\n else if (issue.type === \"bigint\")\n message = `BigInt must be ${issue.exact ? `exactly` : issue.inclusive ? `less than or equal to` : `less than`} ${issue.maximum}`;\n else if (issue.type === \"date\")\n message = `Date must be ${issue.exact ? `exactly` : issue.inclusive ? `smaller than or equal to` : `smaller than`} ${new Date(Number(issue.maximum))}`;\n else\n message = \"Invalid input\";\n break;\n case ZodIssueCode.custom:\n message = `Invalid input`;\n break;\n case ZodIssueCode.invalid_intersection_types:\n message = `Intersection results could not be merged`;\n break;\n case ZodIssueCode.not_multiple_of:\n message = `Number must be a multiple of ${issue.multipleOf}`;\n break;\n case ZodIssueCode.not_finite:\n message = \"Number must be finite\";\n break;\n default:\n message = _ctx.defaultError;\n util.assertNever(issue);\n }\n return { message };\n };\n let overrideErrorMap = errorMap;\n function setErrorMap(map) {\n overrideErrorMap = map;\n }\n function getErrorMap() {\n return overrideErrorMap;\n }\n const makeIssue = (params) => {\n const { data, path, errorMaps, issueData } = params;\n const fullPath = [...path, ...issueData.path || []];\n const fullIssue = {\n ...issueData,\n path: fullPath\n };\n if (issueData.message !== void 0) {\n return {\n ...issueData,\n path: fullPath,\n message: issueData.message\n };\n }\n let errorMessage = \"\";\n const maps = errorMaps.filter((m) => !!m).slice().reverse();\n for (const map of maps) {\n errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message;\n }\n return {\n ...issueData,\n path: fullPath,\n message: errorMessage\n };\n };\n const EMPTY_PATH = [];\n function addIssueToContext(ctx, issueData) {\n const overrideMap = getErrorMap();\n const issue = makeIssue({\n issueData,\n data: ctx.data,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n overrideMap,\n overrideMap === errorMap ? 
void 0 : errorMap\n // then global default map\n ].filter((x) => !!x)\n });\n ctx.common.issues.push(issue);\n }\n class ParseStatus {\n constructor() {\n this.value = \"valid\";\n }\n dirty() {\n if (this.value === \"valid\")\n this.value = \"dirty\";\n }\n abort() {\n if (this.value !== \"aborted\")\n this.value = \"aborted\";\n }\n static mergeArray(status, results) {\n const arrayValue = [];\n for (const s of results) {\n if (s.status === \"aborted\")\n return INVALID;\n if (s.status === \"dirty\")\n status.dirty();\n arrayValue.push(s.value);\n }\n return { status: status.value, value: arrayValue };\n }\n static async mergeObjectAsync(status, pairs) {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n syncPairs.push({\n key,\n value\n });\n }\n return ParseStatus.mergeObjectSync(status, syncPairs);\n }\n static mergeObjectSync(status, pairs) {\n const finalObject = {};\n for (const pair of pairs) {\n const { key, value } = pair;\n if (key.status === \"aborted\")\n return INVALID;\n if (value.status === \"aborted\")\n return INVALID;\n if (key.status === \"dirty\")\n status.dirty();\n if (value.status === \"dirty\")\n status.dirty();\n if (key.value !== \"__proto__\" && (typeof value.value !== \"undefined\" || pair.alwaysSet)) {\n finalObject[key.value] = value.value;\n }\n }\n return { status: status.value, value: finalObject };\n }\n }\n const INVALID = Object.freeze({\n status: \"aborted\"\n });\n const DIRTY = (value) => ({ status: \"dirty\", value });\n const OK = (value) => ({ status: \"valid\", value });\n const isAborted = (x) => x.status === \"aborted\";\n const isDirty = (x) => x.status === \"dirty\";\n const isValid = (x) => x.status === \"valid\";\n const isAsync = (x) => typeof Promise !== \"undefined\" && x instanceof Promise;\n function __classPrivateFieldGet(receiver, state, kind, f) {\n if (typeof state === \"function\" ? receiver !== state || true : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return state.get(receiver);\n }\n function __classPrivateFieldSet(receiver, state, value, kind, f) {\n if (typeof state === \"function\" ? receiver !== state || true : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return state.set(receiver, value), value;\n }\n typeof SuppressedError === \"function\" ? SuppressedError : function(error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n };\n var errorUtil;\n (function(errorUtil2) {\n errorUtil2.errToObj = (message) => typeof message === \"string\" ? { message } : message || {};\n errorUtil2.toString = (message) => typeof message === \"string\" ? message : message === null || message === void 0 ? 
void 0 : message.message;\n })(errorUtil || (errorUtil = {}));\n var _ZodEnum_cache, _ZodNativeEnum_cache;\n class ParseInputLazyPath {\n constructor(parent, value, path, key) {\n this._cachedPath = [];\n this.parent = parent;\n this.data = value;\n this._path = path;\n this._key = key;\n }\n get path() {\n if (!this._cachedPath.length) {\n if (this._key instanceof Array) {\n this._cachedPath.push(...this._path, ...this._key);\n } else {\n this._cachedPath.push(...this._path, this._key);\n }\n }\n return this._cachedPath;\n }\n }\n const handleResult = (ctx, result) => {\n if (isValid(result)) {\n return { success: true, data: result.value };\n } else {\n if (!ctx.common.issues.length) {\n throw new Error(\"Validation failed but no issues detected.\");\n }\n return {\n success: false,\n get error() {\n if (this._error)\n return this._error;\n const error = new ZodError(ctx.common.issues);\n this._error = error;\n return this._error;\n }\n };\n }\n };\n function processCreateParams(params) {\n if (!params)\n return {};\n const { errorMap: errorMap2, invalid_type_error, required_error, description } = params;\n if (errorMap2 && (invalid_type_error || required_error)) {\n throw new Error(`Can't use \"invalid_type_error\" or \"required_error\" in conjunction with custom error map.`);\n }\n if (errorMap2)\n return { errorMap: errorMap2, description };\n const customMap = (iss, ctx) => {\n var _a, _b;\n const { message } = params;\n if (iss.code === \"invalid_enum_value\") {\n return { message: message !== null && message !== void 0 ? message : ctx.defaultError };\n }\n if (typeof ctx.data === \"undefined\") {\n return { message: (_a = message !== null && message !== void 0 ? message : required_error) !== null && _a !== void 0 ? _a : ctx.defaultError };\n }\n if (iss.code !== \"invalid_type\")\n return { message: ctx.defaultError };\n return { message: (_b = message !== null && message !== void 0 ? message : invalid_type_error) !== null && _b !== void 0 ? 
_b : ctx.defaultError };\n };\n return { errorMap: customMap, description };\n }\n class ZodType {\n constructor(def) {\n this.spa = this.safeParseAsync;\n this._def = def;\n this.parse = this.parse.bind(this);\n this.safeParse = this.safeParse.bind(this);\n this.parseAsync = this.parseAsync.bind(this);\n this.safeParseAsync = this.safeParseAsync.bind(this);\n this.spa = this.spa.bind(this);\n this.refine = this.refine.bind(this);\n this.refinement = this.refinement.bind(this);\n this.superRefine = this.superRefine.bind(this);\n this.optional = this.optional.bind(this);\n this.nullable = this.nullable.bind(this);\n this.nullish = this.nullish.bind(this);\n this.array = this.array.bind(this);\n this.promise = this.promise.bind(this);\n this.or = this.or.bind(this);\n this.and = this.and.bind(this);\n this.transform = this.transform.bind(this);\n this.brand = this.brand.bind(this);\n this.default = this.default.bind(this);\n this.catch = this.catch.bind(this);\n this.describe = this.describe.bind(this);\n this.pipe = this.pipe.bind(this);\n this.readonly = this.readonly.bind(this);\n this.isNullable = this.isNullable.bind(this);\n this.isOptional = this.isOptional.bind(this);\n }\n get description() {\n return this._def.description;\n }\n _getType(input) {\n return getParsedType(input.data);\n }\n _getOrReturnCtx(input, ctx) {\n return ctx || {\n common: input.parent.common,\n data: input.data,\n parsedType: getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent\n };\n }\n _processInputParams(input) {\n return {\n status: new ParseStatus(),\n ctx: {\n common: input.parent.common,\n data: input.data,\n parsedType: getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent\n }\n };\n }\n _parseSync(input) {\n const result = this._parse(input);\n if (isAsync(result)) {\n throw new Error(\"Synchronous parse encountered promise.\");\n }\n return result;\n }\n _parseAsync(input) {\n const result = this._parse(input);\n return Promise.resolve(result);\n }\n parse(data, params) {\n const result = this.safeParse(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n safeParse(data, params) {\n var _a;\n const ctx = {\n common: {\n issues: [],\n async: (_a = params === null || params === void 0 ? void 0 : params.async) !== null && _a !== void 0 ? _a : false,\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap\n },\n path: (params === null || params === void 0 ? void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: getParsedType(data)\n };\n const result = this._parseSync({ data, path: ctx.path, parent: ctx });\n return handleResult(ctx, result);\n }\n async parseAsync(data, params) {\n const result = await this.safeParseAsync(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n async safeParseAsync(data, params) {\n const ctx = {\n common: {\n issues: [],\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n async: true\n },\n path: (params === null || params === void 0 ? void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: getParsedType(data)\n };\n const maybeAsyncResult = this._parse({ data, path: ctx.path, parent: ctx });\n const result = await (isAsync(maybeAsyncResult) ? 
maybeAsyncResult : Promise.resolve(maybeAsyncResult));\n return handleResult(ctx, result);\n }\n refine(check, message) {\n const getIssueProperties = (val) => {\n if (typeof message === \"string\" || typeof message === \"undefined\") {\n return { message };\n } else if (typeof message === \"function\") {\n return message(val);\n } else {\n return message;\n }\n };\n return this._refinement((val, ctx) => {\n const result = check(val);\n const setError = () => ctx.addIssue({\n code: ZodIssueCode.custom,\n ...getIssueProperties(val)\n });\n if (typeof Promise !== \"undefined\" && result instanceof Promise) {\n return result.then((data) => {\n if (!data) {\n setError();\n return false;\n } else {\n return true;\n }\n });\n }\n if (!result) {\n setError();\n return false;\n } else {\n return true;\n }\n });\n }\n refinement(check, refinementData) {\n return this._refinement((val, ctx) => {\n if (!check(val)) {\n ctx.addIssue(typeof refinementData === \"function\" ? refinementData(val, ctx) : refinementData);\n return false;\n } else {\n return true;\n }\n });\n }\n _refinement(refinement) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"refinement\", refinement }\n });\n }\n superRefine(refinement) {\n return this._refinement(refinement);\n }\n optional() {\n return ZodOptional.create(this, this._def);\n }\n nullable() {\n return ZodNullable.create(this, this._def);\n }\n nullish() {\n return this.nullable().optional();\n }\n array() {\n return ZodArray.create(this, this._def);\n }\n promise() {\n return ZodPromise.create(this, this._def);\n }\n or(option) {\n return ZodUnion.create([this, option], this._def);\n }\n and(incoming) {\n return ZodIntersection.create(this, incoming, this._def);\n }\n transform(transform) {\n return new ZodEffects({\n ...processCreateParams(this._def),\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"transform\", transform }\n });\n }\n default(def) {\n const defaultValueFunc = typeof def === \"function\" ? def : () => def;\n return new ZodDefault({\n ...processCreateParams(this._def),\n innerType: this,\n defaultValue: defaultValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodDefault\n });\n }\n brand() {\n return new ZodBranded({\n typeName: ZodFirstPartyTypeKind.ZodBranded,\n type: this,\n ...processCreateParams(this._def)\n });\n }\n catch(def) {\n const catchValueFunc = typeof def === \"function\" ? 
def : () => def;\n return new ZodCatch({\n ...processCreateParams(this._def),\n innerType: this,\n catchValue: catchValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodCatch\n });\n }\n describe(description) {\n const This = this.constructor;\n return new This({\n ...this._def,\n description\n });\n }\n pipe(target) {\n return ZodPipeline.create(this, target);\n }\n readonly() {\n return ZodReadonly.create(this);\n }\n isOptional() {\n return this.safeParse(void 0).success;\n }\n isNullable() {\n return this.safeParse(null).success;\n }\n }\n const cuidRegex = /^c[^\\s-]{8,}$/i;\n const cuid2Regex = /^[0-9a-z]+$/;\n const ulidRegex = /^[0-9A-HJKMNP-TV-Z]{26}$/;\n const uuidRegex = /^[0-9a-fA-F]{8}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{12}$/i;\n const nanoidRegex = /^[a-z0-9_-]{21}$/i;\n const durationRegex = /^[-+]?P(?!$)(?:(?:[-+]?\\d+Y)|(?:[-+]?\\d+[.,]\\d+Y$))?(?:(?:[-+]?\\d+M)|(?:[-+]?\\d+[.,]\\d+M$))?(?:(?:[-+]?\\d+W)|(?:[-+]?\\d+[.,]\\d+W$))?(?:(?:[-+]?\\d+D)|(?:[-+]?\\d+[.,]\\d+D$))?(?:T(?=[\\d+-])(?:(?:[-+]?\\d+H)|(?:[-+]?\\d+[.,]\\d+H$))?(?:(?:[-+]?\\d+M)|(?:[-+]?\\d+[.,]\\d+M$))?(?:[-+]?\\d+(?:[.,]\\d+)?S)?)??$/;\n const emailRegex = /^(?!\\.)(?!.*\\.\\.)([A-Z0-9_'+\\-\\.]*)[A-Z0-9_+-]@([A-Z0-9][A-Z0-9\\-]*\\.)+[A-Z]{2,}$/i;\n const _emojiRegex = `^(\\\\p{Extended_Pictographic}|\\\\p{Emoji_Component})+$`;\n let emojiRegex;\n const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/;\n const ipv6Regex = /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/;\n const base64Regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/;\n const dateRegexSource = `((\\\\d\\\\d[2468][048]|\\\\d\\\\d[13579][26]|\\\\d\\\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\\\d{4}-((0[13578]|1[02])-(0[1-9]|[12]\\\\d|3[01])|(0[469]|11)-(0[1-9]|[12]\\\\d|30)|(02)-(0[1-9]|1\\\\d|2[0-8])))`;\n const dateRegex = new RegExp(`^${dateRegexSource}$`);\n function timeRegexSource(args) {\n let regex = `([01]\\\\d|2[0-3]):[0-5]\\\\d:[0-5]\\\\d`;\n if (args.precision) {\n regex = `${regex}\\\\.\\\\d{${args.precision}}`;\n } else if (args.precision == null) {\n regex = `${regex}(\\\\.\\\\d+)?`;\n }\n return regex;\n }\n function timeRegex(args) {\n return new RegExp(`^${timeRegexSource(args)}$`);\n }\n function datetimeRegex(args) {\n let regex = `${dateRegexSource}T${timeRegexSource(args)}`;\n const opts = [];\n opts.push(args.local ? 
`Z?` : `Z`);\n if (args.offset)\n opts.push(`([+-]\\\\d{2}:?\\\\d{2})`);\n regex = `${regex}(${opts.join(\"|\")})`;\n return new RegExp(`^${regex}$`);\n }\n function isValidIP(ip, version2) {\n if ((version2 === \"v4\" || !version2) && ipv4Regex.test(ip)) {\n return true;\n }\n if ((version2 === \"v6\" || !version2) && ipv6Regex.test(ip)) {\n return true;\n }\n return false;\n }\n class ZodString extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = String(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.string) {\n const ctx2 = this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.string,\n received: ctx2.parsedType\n });\n return INVALID;\n }\n const status = new ParseStatus();\n let ctx = void 0;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.length < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n exact: false,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"max\") {\n if (input.data.length > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n exact: false,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"length\") {\n const tooBig = input.data.length > check.value;\n const tooSmall = input.data.length < check.value;\n if (tooBig || tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n if (tooBig) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n exact: true,\n message: check.message\n });\n } else if (tooSmall) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n exact: true,\n message: check.message\n });\n }\n status.dirty();\n }\n } else if (check.kind === \"email\") {\n if (!emailRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"email\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"emoji\") {\n if (!emojiRegex) {\n emojiRegex = new RegExp(_emojiRegex, \"u\");\n }\n if (!emojiRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"emoji\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"uuid\") {\n if (!uuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"uuid\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"nanoid\") {\n if (!nanoidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"nanoid\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"cuid\") {\n if (!cuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"cuid\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"cuid2\") {\n if 
(!cuid2Regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"cuid2\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"ulid\") {\n if (!ulidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"ulid\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"url\") {\n try {\n new URL(input.data);\n } catch (_a) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"url\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"regex\") {\n check.regex.lastIndex = 0;\n const testResult = check.regex.test(input.data);\n if (!testResult) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"regex\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"trim\") {\n input.data = input.data.trim();\n } else if (check.kind === \"includes\") {\n if (!input.data.includes(check.value, check.position)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { includes: check.value, position: check.position },\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"toLowerCase\") {\n input.data = input.data.toLowerCase();\n } else if (check.kind === \"toUpperCase\") {\n input.data = input.data.toUpperCase();\n } else if (check.kind === \"startsWith\") {\n if (!input.data.startsWith(check.value)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { startsWith: check.value },\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"endsWith\") {\n if (!input.data.endsWith(check.value)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { endsWith: check.value },\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"datetime\") {\n const regex = datetimeRegex(check);\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"datetime\",\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"date\") {\n const regex = dateRegex;\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"date\",\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"time\") {\n const regex = timeRegex(check);\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"time\",\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"duration\") {\n if (!durationRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"duration\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"ip\") {\n if (!isValidIP(input.data, check.version)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: 
\"ip\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"base64\") {\n if (!base64Regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"base64\",\n code: ZodIssueCode.invalid_string,\n message: check.message\n });\n status.dirty();\n }\n } else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n _regex(regex, validation, message) {\n return this.refinement((data) => regex.test(data), {\n validation,\n code: ZodIssueCode.invalid_string,\n ...errorUtil.errToObj(message)\n });\n }\n _addCheck(check) {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, check]\n });\n }\n email(message) {\n return this._addCheck({ kind: \"email\", ...errorUtil.errToObj(message) });\n }\n url(message) {\n return this._addCheck({ kind: \"url\", ...errorUtil.errToObj(message) });\n }\n emoji(message) {\n return this._addCheck({ kind: \"emoji\", ...errorUtil.errToObj(message) });\n }\n uuid(message) {\n return this._addCheck({ kind: \"uuid\", ...errorUtil.errToObj(message) });\n }\n nanoid(message) {\n return this._addCheck({ kind: \"nanoid\", ...errorUtil.errToObj(message) });\n }\n cuid(message) {\n return this._addCheck({ kind: \"cuid\", ...errorUtil.errToObj(message) });\n }\n cuid2(message) {\n return this._addCheck({ kind: \"cuid2\", ...errorUtil.errToObj(message) });\n }\n ulid(message) {\n return this._addCheck({ kind: \"ulid\", ...errorUtil.errToObj(message) });\n }\n base64(message) {\n return this._addCheck({ kind: \"base64\", ...errorUtil.errToObj(message) });\n }\n ip(options) {\n return this._addCheck({ kind: \"ip\", ...errorUtil.errToObj(options) });\n }\n datetime(options) {\n var _a, _b;\n if (typeof options === \"string\") {\n return this._addCheck({\n kind: \"datetime\",\n precision: null,\n offset: false,\n local: false,\n message: options\n });\n }\n return this._addCheck({\n kind: \"datetime\",\n precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === \"undefined\" ? null : options === null || options === void 0 ? void 0 : options.precision,\n offset: (_a = options === null || options === void 0 ? void 0 : options.offset) !== null && _a !== void 0 ? _a : false,\n local: (_b = options === null || options === void 0 ? void 0 : options.local) !== null && _b !== void 0 ? _b : false,\n ...errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message)\n });\n }\n date(message) {\n return this._addCheck({ kind: \"date\", message });\n }\n time(options) {\n if (typeof options === \"string\") {\n return this._addCheck({\n kind: \"time\",\n precision: null,\n message: options\n });\n }\n return this._addCheck({\n kind: \"time\",\n precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === \"undefined\" ? null : options === null || options === void 0 ? void 0 : options.precision,\n ...errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message)\n });\n }\n duration(message) {\n return this._addCheck({ kind: \"duration\", ...errorUtil.errToObj(message) });\n }\n regex(regex, message) {\n return this._addCheck({\n kind: \"regex\",\n regex,\n ...errorUtil.errToObj(message)\n });\n }\n includes(value, options) {\n return this._addCheck({\n kind: \"includes\",\n value,\n position: options === null || options === void 0 ? 
void 0 : options.position,\n ...errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message)\n });\n }\n startsWith(value, message) {\n return this._addCheck({\n kind: \"startsWith\",\n value,\n ...errorUtil.errToObj(message)\n });\n }\n endsWith(value, message) {\n return this._addCheck({\n kind: \"endsWith\",\n value,\n ...errorUtil.errToObj(message)\n });\n }\n min(minLength, message) {\n return this._addCheck({\n kind: \"min\",\n value: minLength,\n ...errorUtil.errToObj(message)\n });\n }\n max(maxLength, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxLength,\n ...errorUtil.errToObj(message)\n });\n }\n length(len, message) {\n return this._addCheck({\n kind: \"length\",\n value: len,\n ...errorUtil.errToObj(message)\n });\n }\n /**\n * @deprecated Use z.string().min(1) instead.\n * @see {@link ZodString.min}\n */\n nonempty(message) {\n return this.min(1, errorUtil.errToObj(message));\n }\n trim() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"trim\" }]\n });\n }\n toLowerCase() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"toLowerCase\" }]\n });\n }\n toUpperCase() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"toUpperCase\" }]\n });\n }\n get isDatetime() {\n return !!this._def.checks.find((ch) => ch.kind === \"datetime\");\n }\n get isDate() {\n return !!this._def.checks.find((ch) => ch.kind === \"date\");\n }\n get isTime() {\n return !!this._def.checks.find((ch) => ch.kind === \"time\");\n }\n get isDuration() {\n return !!this._def.checks.find((ch) => ch.kind === \"duration\");\n }\n get isEmail() {\n return !!this._def.checks.find((ch) => ch.kind === \"email\");\n }\n get isURL() {\n return !!this._def.checks.find((ch) => ch.kind === \"url\");\n }\n get isEmoji() {\n return !!this._def.checks.find((ch) => ch.kind === \"emoji\");\n }\n get isUUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"uuid\");\n }\n get isNANOID() {\n return !!this._def.checks.find((ch) => ch.kind === \"nanoid\");\n }\n get isCUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid\");\n }\n get isCUID2() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid2\");\n }\n get isULID() {\n return !!this._def.checks.find((ch) => ch.kind === \"ulid\");\n }\n get isIP() {\n return !!this._def.checks.find((ch) => ch.kind === \"ip\");\n }\n get isBase64() {\n return !!this._def.checks.find((ch) => ch.kind === \"base64\");\n }\n get minLength() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxLength() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n }\n ZodString.create = (params) => {\n var _a;\n return new ZodString({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodString,\n coerce: (_a = params === null || params === void 0 ? void 0 : params.coerce) !== null && _a !== void 0 ? _a : false,\n ...processCreateParams(params)\n });\n };\n function floatSafeRemainder(val, step) {\n const valDecCount = (val.toString().split(\".\")[1] || \"\").length;\n const stepDecCount = (step.toString().split(\".\")[1] || \"\").length;\n const decCount = valDecCount > stepDecCount ? 
valDecCount : stepDecCount;\n const valInt = parseInt(val.toFixed(decCount).replace(\".\", \"\"));\n const stepInt = parseInt(step.toFixed(decCount).replace(\".\", \"\"));\n return valInt % stepInt / Math.pow(10, decCount);\n }\n class ZodNumber extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n this.step = this.multipleOf;\n }\n _parse(input) {\n if (this._def.coerce) {\n input.data = Number(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.number) {\n const ctx2 = this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.number,\n received: ctx2.parsedType\n });\n return INVALID;\n }\n let ctx = void 0;\n const status = new ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"int\") {\n if (!util.isInteger(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: \"integer\",\n received: \"float\",\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"min\") {\n const tooSmall = check.inclusive ? input.data < check.value : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n exact: false,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"max\") {\n const tooBig = check.inclusive ? input.data > check.value : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n exact: false,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"multipleOf\") {\n if (floatSafeRemainder(input.data, check.value) !== 0) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"finite\") {\n if (!Number.isFinite(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_finite,\n message: check.message\n });\n status.dirty();\n }\n } else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodNumber({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil.toString(message)\n }\n ]\n });\n }\n _addCheck(check) {\n return new ZodNumber({\n ...this._def,\n checks: [...this._def.checks, check]\n });\n }\n int(message) {\n return this._addCheck({\n kind: \"int\",\n message: errorUtil.toString(message)\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: false,\n message: errorUtil.toString(message)\n });\n }\n negative(message) {\n 
return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: false,\n message: errorUtil.toString(message)\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: true,\n message: errorUtil.toString(message)\n });\n }\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: true,\n message: errorUtil.toString(message)\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value,\n message: errorUtil.toString(message)\n });\n }\n finite(message) {\n return this._addCheck({\n kind: \"finite\",\n message: errorUtil.toString(message)\n });\n }\n safe(message) {\n return this._addCheck({\n kind: \"min\",\n inclusive: true,\n value: Number.MIN_SAFE_INTEGER,\n message: errorUtil.toString(message)\n })._addCheck({\n kind: \"max\",\n inclusive: true,\n value: Number.MAX_SAFE_INTEGER,\n message: errorUtil.toString(message)\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n get isInt() {\n return !!this._def.checks.find((ch) => ch.kind === \"int\" || ch.kind === \"multipleOf\" && util.isInteger(ch.value));\n }\n get isFinite() {\n let max = null, min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"finite\" || ch.kind === \"int\" || ch.kind === \"multipleOf\") {\n return true;\n } else if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n } else if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return Number.isFinite(min) && Number.isFinite(max);\n }\n }\n ZodNumber.create = (params) => {\n return new ZodNumber({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodNumber,\n coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false,\n ...processCreateParams(params)\n });\n };\n class ZodBigInt extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n }\n _parse(input) {\n if (this._def.coerce) {\n input.data = BigInt(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.bigint) {\n const ctx2 = this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.bigint,\n received: ctx2.parsedType\n });\n return INVALID;\n }\n let ctx = void 0;\n const status = new ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n const tooSmall = check.inclusive ? input.data < check.value : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n type: \"bigint\",\n minimum: check.value,\n inclusive: check.inclusive,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"max\") {\n const tooBig = check.inclusive ? 
input.data > check.value : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n type: \"bigint\",\n maximum: check.value,\n inclusive: check.inclusive,\n message: check.message\n });\n status.dirty();\n }\n } else if (check.kind === \"multipleOf\") {\n if (input.data % check.value !== BigInt(0)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message\n });\n status.dirty();\n }\n } else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodBigInt({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil.toString(message)\n }\n ]\n });\n }\n _addCheck(check) {\n return new ZodBigInt({\n ...this._def,\n checks: [...this._def.checks, check]\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: BigInt(0),\n inclusive: false,\n message: errorUtil.toString(message)\n });\n }\n negative(message) {\n return this._addCheck({\n kind: \"max\",\n value: BigInt(0),\n inclusive: false,\n message: errorUtil.toString(message)\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: BigInt(0),\n inclusive: true,\n message: errorUtil.toString(message)\n });\n }\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: BigInt(0),\n inclusive: true,\n message: errorUtil.toString(message)\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value,\n message: errorUtil.toString(message)\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n }\n ZodBigInt.create = (params) => {\n var _a;\n return new ZodBigInt({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodBigInt,\n coerce: (_a = params === null || params === void 0 ? void 0 : params.coerce) !== null && _a !== void 0 ? _a : false,\n ...processCreateParams(params)\n });\n };\n class ZodBoolean extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = Boolean(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.boolean) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.boolean,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodBoolean.create = (params) => {\n return new ZodBoolean({\n typeName: ZodFirstPartyTypeKind.ZodBoolean,\n coerce: (params === null || params === void 0 ? 
void 0 : params.coerce) || false,\n ...processCreateParams(params)\n });\n };\n class ZodDate extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = new Date(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.date) {\n const ctx2 = this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.date,\n received: ctx2.parsedType\n });\n return INVALID;\n }\n if (isNaN(input.data.getTime())) {\n const ctx2 = this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_date\n });\n return INVALID;\n }\n const status = new ParseStatus();\n let ctx = void 0;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.getTime() < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n message: check.message,\n inclusive: true,\n exact: false,\n minimum: check.value,\n type: \"date\"\n });\n status.dirty();\n }\n } else if (check.kind === \"max\") {\n if (input.data.getTime() > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n message: check.message,\n inclusive: true,\n exact: false,\n maximum: check.value,\n type: \"date\"\n });\n status.dirty();\n }\n } else {\n util.assertNever(check);\n }\n }\n return {\n status: status.value,\n value: new Date(input.data.getTime())\n };\n }\n _addCheck(check) {\n return new ZodDate({\n ...this._def,\n checks: [...this._def.checks, check]\n });\n }\n min(minDate, message) {\n return this._addCheck({\n kind: \"min\",\n value: minDate.getTime(),\n message: errorUtil.toString(message)\n });\n }\n max(maxDate, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxDate.getTime(),\n message: errorUtil.toString(message)\n });\n }\n get minDate() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min != null ? new Date(min) : null;\n }\n get maxDate() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max != null ? new Date(max) : null;\n }\n }\n ZodDate.create = (params) => {\n return new ZodDate({\n checks: [],\n coerce: (params === null || params === void 0 ? 
void 0 : params.coerce) || false,\n typeName: ZodFirstPartyTypeKind.ZodDate,\n ...processCreateParams(params)\n });\n };\n class ZodSymbol extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.symbol) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.symbol,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodSymbol.create = (params) => {\n return new ZodSymbol({\n typeName: ZodFirstPartyTypeKind.ZodSymbol,\n ...processCreateParams(params)\n });\n };\n class ZodUndefined extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.undefined,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodUndefined.create = (params) => {\n return new ZodUndefined({\n typeName: ZodFirstPartyTypeKind.ZodUndefined,\n ...processCreateParams(params)\n });\n };\n class ZodNull extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.null) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.null,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodNull.create = (params) => {\n return new ZodNull({\n typeName: ZodFirstPartyTypeKind.ZodNull,\n ...processCreateParams(params)\n });\n };\n class ZodAny extends ZodType {\n constructor() {\n super(...arguments);\n this._any = true;\n }\n _parse(input) {\n return OK(input.data);\n }\n }\n ZodAny.create = (params) => {\n return new ZodAny({\n typeName: ZodFirstPartyTypeKind.ZodAny,\n ...processCreateParams(params)\n });\n };\n class ZodUnknown extends ZodType {\n constructor() {\n super(...arguments);\n this._unknown = true;\n }\n _parse(input) {\n return OK(input.data);\n }\n }\n ZodUnknown.create = (params) => {\n return new ZodUnknown({\n typeName: ZodFirstPartyTypeKind.ZodUnknown,\n ...processCreateParams(params)\n });\n };\n class ZodNever extends ZodType {\n _parse(input) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.never,\n received: ctx.parsedType\n });\n return INVALID;\n }\n }\n ZodNever.create = (params) => {\n return new ZodNever({\n typeName: ZodFirstPartyTypeKind.ZodNever,\n ...processCreateParams(params)\n });\n };\n class ZodVoid extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.void,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodVoid.create = (params) => {\n return new ZodVoid({\n typeName: ZodFirstPartyTypeKind.ZodVoid,\n ...processCreateParams(params)\n });\n };\n class ZodArray extends ZodType {\n _parse(input) {\n const { ctx, status } = this._processInputParams(input);\n const def = this._def;\n if (ctx.parsedType !== ZodParsedType.array) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.array,\n received: ctx.parsedType\n });\n return INVALID;\n }\n if (def.exactLength !== null) 
{\n const tooBig = ctx.data.length > def.exactLength.value;\n const tooSmall = ctx.data.length < def.exactLength.value;\n if (tooBig || tooSmall) {\n addIssueToContext(ctx, {\n code: tooBig ? ZodIssueCode.too_big : ZodIssueCode.too_small,\n minimum: tooSmall ? def.exactLength.value : void 0,\n maximum: tooBig ? def.exactLength.value : void 0,\n type: \"array\",\n inclusive: true,\n exact: true,\n message: def.exactLength.message\n });\n status.dirty();\n }\n }\n if (def.minLength !== null) {\n if (ctx.data.length < def.minLength.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: def.minLength.value,\n type: \"array\",\n inclusive: true,\n exact: false,\n message: def.minLength.message\n });\n status.dirty();\n }\n }\n if (def.maxLength !== null) {\n if (ctx.data.length > def.maxLength.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: def.maxLength.value,\n type: \"array\",\n inclusive: true,\n exact: false,\n message: def.maxLength.message\n });\n status.dirty();\n }\n }\n if (ctx.common.async) {\n return Promise.all([...ctx.data].map((item, i) => {\n return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n })).then((result2) => {\n return ParseStatus.mergeArray(status, result2);\n });\n }\n const result = [...ctx.data].map((item, i) => {\n return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n });\n return ParseStatus.mergeArray(status, result);\n }\n get element() {\n return this._def.type;\n }\n min(minLength, message) {\n return new ZodArray({\n ...this._def,\n minLength: { value: minLength, message: errorUtil.toString(message) }\n });\n }\n max(maxLength, message) {\n return new ZodArray({\n ...this._def,\n maxLength: { value: maxLength, message: errorUtil.toString(message) }\n });\n }\n length(len, message) {\n return new ZodArray({\n ...this._def,\n exactLength: { value: len, message: errorUtil.toString(message) }\n });\n }\n nonempty(message) {\n return this.min(1, message);\n }\n }\n ZodArray.create = (schema, params) => {\n return new ZodArray({\n type: schema,\n minLength: null,\n maxLength: null,\n exactLength: null,\n typeName: ZodFirstPartyTypeKind.ZodArray,\n ...processCreateParams(params)\n });\n };\n function deepPartialify(schema) {\n if (schema instanceof ZodObject) {\n const newShape = {};\n for (const key in schema.shape) {\n const fieldSchema = schema.shape[key];\n newShape[key] = ZodOptional.create(deepPartialify(fieldSchema));\n }\n return new ZodObject({\n ...schema._def,\n shape: () => newShape\n });\n } else if (schema instanceof ZodArray) {\n return new ZodArray({\n ...schema._def,\n type: deepPartialify(schema.element)\n });\n } else if (schema instanceof ZodOptional) {\n return ZodOptional.create(deepPartialify(schema.unwrap()));\n } else if (schema instanceof ZodNullable) {\n return ZodNullable.create(deepPartialify(schema.unwrap()));\n } else if (schema instanceof ZodTuple) {\n return ZodTuple.create(schema.items.map((item) => deepPartialify(item)));\n } else {\n return schema;\n }\n }\n class ZodObject extends ZodType {\n constructor() {\n super(...arguments);\n this._cached = null;\n this.nonstrict = this.passthrough;\n this.augment = this.extend;\n }\n _getCached() {\n if (this._cached !== null)\n return this._cached;\n const shape = this._def.shape();\n const keys = util.objectKeys(shape);\n return this._cached = { shape, keys };\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.object) {\n const ctx2 = 
this._getOrReturnCtx(input);\n addIssueToContext(ctx2, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx2.parsedType\n });\n return INVALID;\n }\n const { status, ctx } = this._processInputParams(input);\n const { shape, keys: shapeKeys } = this._getCached();\n const extraKeys = [];\n if (!(this._def.catchall instanceof ZodNever && this._def.unknownKeys === \"strip\")) {\n for (const key in ctx.data) {\n if (!shapeKeys.includes(key)) {\n extraKeys.push(key);\n }\n }\n }\n const pairs = [];\n for (const key of shapeKeys) {\n const keyValidator = shape[key];\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)),\n alwaysSet: key in ctx.data\n });\n }\n if (this._def.catchall instanceof ZodNever) {\n const unknownKeys = this._def.unknownKeys;\n if (unknownKeys === \"passthrough\") {\n for (const key of extraKeys) {\n pairs.push({\n key: { status: \"valid\", value: key },\n value: { status: \"valid\", value: ctx.data[key] }\n });\n }\n } else if (unknownKeys === \"strict\") {\n if (extraKeys.length > 0) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.unrecognized_keys,\n keys: extraKeys\n });\n status.dirty();\n }\n } else if (unknownKeys === \"strip\") ;\n else {\n throw new Error(`Internal ZodObject error: invalid unknownKeys value.`);\n }\n } else {\n const catchall = this._def.catchall;\n for (const key of extraKeys) {\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: catchall._parse(\n new ParseInputLazyPath(ctx, value, ctx.path, key)\n //, ctx.child(key), value, getParsedType(value)\n ),\n alwaysSet: key in ctx.data\n });\n }\n }\n if (ctx.common.async) {\n return Promise.resolve().then(async () => {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n syncPairs.push({\n key,\n value,\n alwaysSet: pair.alwaysSet\n });\n }\n return syncPairs;\n }).then((syncPairs) => {\n return ParseStatus.mergeObjectSync(status, syncPairs);\n });\n } else {\n return ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get shape() {\n return this._def.shape();\n }\n strict(message) {\n errorUtil.errToObj;\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strict\",\n ...message !== void 0 ? {\n errorMap: (issue, ctx) => {\n var _a, _b, _c, _d;\n const defaultError = (_c = (_b = (_a = this._def).errorMap) === null || _b === void 0 ? void 0 : _b.call(_a, issue, ctx).message) !== null && _c !== void 0 ? _c : ctx.defaultError;\n if (issue.code === \"unrecognized_keys\")\n return {\n message: (_d = errorUtil.errToObj(message).message) !== null && _d !== void 0 ? 
_d : defaultError\n };\n return {\n message: defaultError\n };\n }\n } : {}\n });\n }\n strip() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strip\"\n });\n }\n passthrough() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"passthrough\"\n });\n }\n // const AugmentFactory =\n // <Def extends ZodObjectDef>(def: Def) =>\n // <Augmentation extends ZodRawShape>(\n // augmentation: Augmentation\n // ): ZodObject<\n // extendShape<ReturnType<Def[\"shape\"]>, Augmentation>,\n // Def[\"unknownKeys\"],\n // Def[\"catchall\"]\n // > => {\n // return new ZodObject({\n // ...def,\n // shape: () => ({\n // ...def.shape(),\n // ...augmentation,\n // }),\n // }) as any;\n // };\n extend(augmentation) {\n return new ZodObject({\n ...this._def,\n shape: () => ({\n ...this._def.shape(),\n ...augmentation\n })\n });\n }\n /**\n * Prior to zod@1.0.12 there was a bug in the\n * inferred type of merged objects. Please\n * upgrade if you are experiencing issues.\n */\n merge(merging) {\n const merged = new ZodObject({\n unknownKeys: merging._def.unknownKeys,\n catchall: merging._def.catchall,\n shape: () => ({\n ...this._def.shape(),\n ...merging._def.shape()\n }),\n typeName: ZodFirstPartyTypeKind.ZodObject\n });\n return merged;\n }\n // merge<\n // Incoming extends AnyZodObject,\n // Augmentation extends Incoming[\"shape\"],\n // NewOutput extends {\n // [k in keyof Augmentation | keyof Output]: k extends keyof Augmentation\n // ? Augmentation[k][\"_output\"]\n // : k extends keyof Output\n // ? Output[k]\n // : never;\n // },\n // NewInput extends {\n // [k in keyof Augmentation | keyof Input]: k extends keyof Augmentation\n // ? Augmentation[k][\"_input\"]\n // : k extends keyof Input\n // ? Input[k]\n // : never;\n // }\n // >(\n // merging: Incoming\n // ): ZodObject<\n // extendShape<T, ReturnType<Incoming[\"_def\"][\"shape\"]>>,\n // Incoming[\"_def\"][\"unknownKeys\"],\n // Incoming[\"_def\"][\"catchall\"],\n // NewOutput,\n // NewInput\n // > {\n // const merged: any = new ZodObject({\n // unknownKeys: merging._def.unknownKeys,\n // catchall: merging._def.catchall,\n // shape: () =>\n // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n // typeName: ZodFirstPartyTypeKind.ZodObject,\n // }) as any;\n // return merged;\n // }\n setKey(key, schema) {\n return this.augment({ [key]: schema });\n }\n // merge<Incoming extends AnyZodObject>(\n // merging: Incoming\n // ): //ZodObject<T & Incoming[\"_shape\"], UnknownKeys, Catchall> = (merging) => {\n // ZodObject<\n // extendShape<T, ReturnType<Incoming[\"_def\"][\"shape\"]>>,\n // Incoming[\"_def\"][\"unknownKeys\"],\n // Incoming[\"_def\"][\"catchall\"]\n // > {\n // // const mergedShape = objectUtil.mergeShapes(\n // // this._def.shape(),\n // // merging._def.shape()\n // // );\n // const merged: any = new ZodObject({\n // unknownKeys: merging._def.unknownKeys,\n // catchall: merging._def.catchall,\n // shape: () =>\n // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n // typeName: ZodFirstPartyTypeKind.ZodObject,\n // }) as any;\n // return merged;\n // }\n catchall(index) {\n return new ZodObject({\n ...this._def,\n catchall: index\n });\n }\n pick(mask) {\n const shape = {};\n util.objectKeys(mask).forEach((key) => {\n if (mask[key] && this.shape[key]) {\n shape[key] = this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape\n });\n }\n omit(mask) {\n const shape = {};\n util.objectKeys(this.shape).forEach((key) => {\n if (!mask[key]) {\n shape[key] = 
this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape\n });\n }\n /**\n * @deprecated\n */\n deepPartial() {\n return deepPartialify(this);\n }\n partial(mask) {\n const newShape = {};\n util.objectKeys(this.shape).forEach((key) => {\n const fieldSchema = this.shape[key];\n if (mask && !mask[key]) {\n newShape[key] = fieldSchema;\n } else {\n newShape[key] = fieldSchema.optional();\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape\n });\n }\n required(mask) {\n const newShape = {};\n util.objectKeys(this.shape).forEach((key) => {\n if (mask && !mask[key]) {\n newShape[key] = this.shape[key];\n } else {\n const fieldSchema = this.shape[key];\n let newField = fieldSchema;\n while (newField instanceof ZodOptional) {\n newField = newField._def.innerType;\n }\n newShape[key] = newField;\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape\n });\n }\n keyof() {\n return createZodEnum(util.objectKeys(this.shape));\n }\n }\n ZodObject.create = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params)\n });\n };\n ZodObject.strictCreate = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strict\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params)\n });\n };\n ZodObject.lazycreate = (shape, params) => {\n return new ZodObject({\n shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params)\n });\n };\n class ZodUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const options = this._def.options;\n function handleResults(results) {\n for (const result of results) {\n if (result.result.status === \"valid\") {\n return result.result;\n }\n }\n for (const result of results) {\n if (result.result.status === \"dirty\") {\n ctx.common.issues.push(...result.ctx.common.issues);\n return result.result;\n }\n }\n const unionErrors = results.map((result) => new ZodError(result.ctx.common.issues));\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union,\n unionErrors\n });\n return INVALID;\n }\n if (ctx.common.async) {\n return Promise.all(options.map(async (option) => {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: []\n },\n parent: null\n };\n return {\n result: await option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx\n }),\n ctx: childCtx\n };\n })).then(handleResults);\n } else {\n let dirty = void 0;\n const issues = [];\n for (const option of options) {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: []\n },\n parent: null\n };\n const result = option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx\n });\n if (result.status === \"valid\") {\n return result;\n } else if (result.status === \"dirty\" && !dirty) {\n dirty = { result, ctx: childCtx };\n }\n if (childCtx.common.issues.length) {\n issues.push(childCtx.common.issues);\n }\n }\n if (dirty) {\n ctx.common.issues.push(...dirty.ctx.common.issues);\n return dirty.result;\n }\n const unionErrors = issues.map((issues2) => new ZodError(issues2));\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union,\n unionErrors\n });\n return INVALID;\n }\n }\n get options() {\n return this._def.options;\n }\n }\n 
ZodUnion.create = (types, params) => {\n return new ZodUnion({\n options: types,\n typeName: ZodFirstPartyTypeKind.ZodUnion,\n ...processCreateParams(params)\n });\n };\n const getDiscriminator = (type) => {\n if (type instanceof ZodLazy) {\n return getDiscriminator(type.schema);\n } else if (type instanceof ZodEffects) {\n return getDiscriminator(type.innerType());\n } else if (type instanceof ZodLiteral) {\n return [type.value];\n } else if (type instanceof ZodEnum) {\n return type.options;\n } else if (type instanceof ZodNativeEnum) {\n return util.objectValues(type.enum);\n } else if (type instanceof ZodDefault) {\n return getDiscriminator(type._def.innerType);\n } else if (type instanceof ZodUndefined) {\n return [void 0];\n } else if (type instanceof ZodNull) {\n return [null];\n } else if (type instanceof ZodOptional) {\n return [void 0, ...getDiscriminator(type.unwrap())];\n } else if (type instanceof ZodNullable) {\n return [null, ...getDiscriminator(type.unwrap())];\n } else if (type instanceof ZodBranded) {\n return getDiscriminator(type.unwrap());\n } else if (type instanceof ZodReadonly) {\n return getDiscriminator(type.unwrap());\n } else if (type instanceof ZodCatch) {\n return getDiscriminator(type._def.innerType);\n } else {\n return [];\n }\n };\n class ZodDiscriminatedUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.object) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx.parsedType\n });\n return INVALID;\n }\n const discriminator = this.discriminator;\n const discriminatorValue = ctx.data[discriminator];\n const option = this.optionsMap.get(discriminatorValue);\n if (!option) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union_discriminator,\n options: Array.from(this.optionsMap.keys()),\n path: [discriminator]\n });\n return INVALID;\n }\n if (ctx.common.async) {\n return option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n } else {\n return option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n }\n }\n get discriminator() {\n return this._def.discriminator;\n }\n get options() {\n return this._def.options;\n }\n get optionsMap() {\n return this._def.optionsMap;\n }\n /**\n * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor.\n * However, it only allows a union of objects, all of which need to share a discriminator property. 
This property must\n * have a different value for each object in the union.\n * @param discriminator the name of the discriminator property\n * @param types an array of object schemas\n * @param params\n */\n static create(discriminator, options, params) {\n const optionsMap = /* @__PURE__ */ new Map();\n for (const type of options) {\n const discriminatorValues = getDiscriminator(type.shape[discriminator]);\n if (!discriminatorValues.length) {\n throw new Error(`A discriminator value for key \\`${discriminator}\\` could not be extracted from all schema options`);\n }\n for (const value of discriminatorValues) {\n if (optionsMap.has(value)) {\n throw new Error(`Discriminator property ${String(discriminator)} has duplicate value ${String(value)}`);\n }\n optionsMap.set(value, type);\n }\n }\n return new ZodDiscriminatedUnion({\n typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion,\n discriminator,\n options,\n optionsMap,\n ...processCreateParams(params)\n });\n }\n }\n function mergeValues(a, b) {\n const aType = getParsedType(a);\n const bType = getParsedType(b);\n if (a === b) {\n return { valid: true, data: a };\n } else if (aType === ZodParsedType.object && bType === ZodParsedType.object) {\n const bKeys = util.objectKeys(b);\n const sharedKeys = util.objectKeys(a).filter((key) => bKeys.indexOf(key) !== -1);\n const newObj = { ...a, ...b };\n for (const key of sharedKeys) {\n const sharedValue = mergeValues(a[key], b[key]);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newObj[key] = sharedValue.data;\n }\n return { valid: true, data: newObj };\n } else if (aType === ZodParsedType.array && bType === ZodParsedType.array) {\n if (a.length !== b.length) {\n return { valid: false };\n }\n const newArray = [];\n for (let index = 0; index < a.length; index++) {\n const itemA = a[index];\n const itemB = b[index];\n const sharedValue = mergeValues(itemA, itemB);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newArray.push(sharedValue.data);\n }\n return { valid: true, data: newArray };\n } else if (aType === ZodParsedType.date && bType === ZodParsedType.date && +a === +b) {\n return { valid: true, data: a };\n } else {\n return { valid: false };\n }\n }\n class ZodIntersection extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const handleParsed = (parsedLeft, parsedRight) => {\n if (isAborted(parsedLeft) || isAborted(parsedRight)) {\n return INVALID;\n }\n const merged = mergeValues(parsedLeft.value, parsedRight.value);\n if (!merged.valid) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_intersection_types\n });\n return INVALID;\n }\n if (isDirty(parsedLeft) || isDirty(parsedRight)) {\n status.dirty();\n }\n return { status: status.value, value: merged.data };\n };\n if (ctx.common.async) {\n return Promise.all([\n this._def.left._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n }),\n this._def.right._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n })\n ]).then(([left, right]) => handleParsed(left, right));\n } else {\n return handleParsed(this._def.left._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n }), this._def.right._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n }));\n }\n }\n }\n ZodIntersection.create = (left, right, params) => {\n return new ZodIntersection({\n left,\n right,\n typeName: ZodFirstPartyTypeKind.ZodIntersection,\n ...processCreateParams(params)\n });\n };\n class ZodTuple extends ZodType {\n _parse(input) {\n const { 
status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.array) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.array,\n received: ctx.parsedType\n });\n return INVALID;\n }\n if (ctx.data.length < this._def.items.length) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: this._def.items.length,\n inclusive: true,\n exact: false,\n type: \"array\"\n });\n return INVALID;\n }\n const rest = this._def.rest;\n if (!rest && ctx.data.length > this._def.items.length) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: this._def.items.length,\n inclusive: true,\n exact: false,\n type: \"array\"\n });\n status.dirty();\n }\n const items = [...ctx.data].map((item, itemIndex) => {\n const schema = this._def.items[itemIndex] || this._def.rest;\n if (!schema)\n return null;\n return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex));\n }).filter((x) => !!x);\n if (ctx.common.async) {\n return Promise.all(items).then((results) => {\n return ParseStatus.mergeArray(status, results);\n });\n } else {\n return ParseStatus.mergeArray(status, items);\n }\n }\n get items() {\n return this._def.items;\n }\n rest(rest) {\n return new ZodTuple({\n ...this._def,\n rest\n });\n }\n }\n ZodTuple.create = (schemas, params) => {\n if (!Array.isArray(schemas)) {\n throw new Error(\"You must pass an array of schemas to z.tuple([ ... ])\");\n }\n return new ZodTuple({\n items: schemas,\n typeName: ZodFirstPartyTypeKind.ZodTuple,\n rest: null,\n ...processCreateParams(params)\n });\n };\n class ZodRecord extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.object) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx.parsedType\n });\n return INVALID;\n }\n const pairs = [];\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n for (const key in ctx.data) {\n pairs.push({\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)),\n value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)),\n alwaysSet: key in ctx.data\n });\n }\n if (ctx.common.async) {\n return ParseStatus.mergeObjectAsync(status, pairs);\n } else {\n return ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get element() {\n return this._def.valueType;\n }\n static create(first, second, third) {\n if (second instanceof ZodType) {\n return new ZodRecord({\n keyType: first,\n valueType: second,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(third)\n });\n }\n return new ZodRecord({\n keyType: ZodString.create(),\n valueType: first,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(second)\n });\n }\n }\n class ZodMap extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.map) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.map,\n received: ctx.parsedType\n });\n return INVALID;\n }\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n const pairs = [...ctx.data.entries()].map(([key, value], index) => {\n return {\n key: 
keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, \"key\"])),\n value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, \"value\"]))\n };\n });\n if (ctx.common.async) {\n const finalMap = /* @__PURE__ */ new Map();\n return Promise.resolve().then(async () => {\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n });\n } else {\n const finalMap = /* @__PURE__ */ new Map();\n for (const pair of pairs) {\n const key = pair.key;\n const value = pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n }\n }\n }\n ZodMap.create = (keyType, valueType, params) => {\n return new ZodMap({\n valueType,\n keyType,\n typeName: ZodFirstPartyTypeKind.ZodMap,\n ...processCreateParams(params)\n });\n };\n class ZodSet extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.set) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.set,\n received: ctx.parsedType\n });\n return INVALID;\n }\n const def = this._def;\n if (def.minSize !== null) {\n if (ctx.data.size < def.minSize.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: def.minSize.value,\n type: \"set\",\n inclusive: true,\n exact: false,\n message: def.minSize.message\n });\n status.dirty();\n }\n }\n if (def.maxSize !== null) {\n if (ctx.data.size > def.maxSize.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: def.maxSize.value,\n type: \"set\",\n inclusive: true,\n exact: false,\n message: def.maxSize.message\n });\n status.dirty();\n }\n }\n const valueType = this._def.valueType;\n function finalizeSet(elements2) {\n const parsedSet = /* @__PURE__ */ new Set();\n for (const element of elements2) {\n if (element.status === \"aborted\")\n return INVALID;\n if (element.status === \"dirty\")\n status.dirty();\n parsedSet.add(element.value);\n }\n return { status: status.value, value: parsedSet };\n }\n const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i)));\n if (ctx.common.async) {\n return Promise.all(elements).then((elements2) => finalizeSet(elements2));\n } else {\n return finalizeSet(elements);\n }\n }\n min(minSize, message) {\n return new ZodSet({\n ...this._def,\n minSize: { value: minSize, message: errorUtil.toString(message) }\n });\n }\n max(maxSize, message) {\n return new ZodSet({\n ...this._def,\n maxSize: { value: maxSize, message: errorUtil.toString(message) }\n });\n }\n size(size, message) {\n return this.min(size, message).max(size, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n }\n ZodSet.create = (valueType, params) => {\n return new ZodSet({\n valueType,\n minSize: null,\n maxSize: null,\n typeName: ZodFirstPartyTypeKind.ZodSet,\n ...processCreateParams(params)\n });\n };\n class ZodFunction extends ZodType {\n constructor() {\n super(...arguments);\n this.validate = this.implement;\n }\n 
_parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.function) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.function,\n received: ctx.parsedType\n });\n return INVALID;\n }\n function makeArgsIssue(args, error) {\n return makeIssue({\n data: args,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n getErrorMap(),\n errorMap\n ].filter((x) => !!x),\n issueData: {\n code: ZodIssueCode.invalid_arguments,\n argumentsError: error\n }\n });\n }\n function makeReturnsIssue(returns, error) {\n return makeIssue({\n data: returns,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n getErrorMap(),\n errorMap\n ].filter((x) => !!x),\n issueData: {\n code: ZodIssueCode.invalid_return_type,\n returnTypeError: error\n }\n });\n }\n const params = { errorMap: ctx.common.contextualErrorMap };\n const fn = ctx.data;\n if (this._def.returns instanceof ZodPromise) {\n const me = this;\n return OK(async function(...args) {\n const error = new ZodError([]);\n const parsedArgs = await me._def.args.parseAsync(args, params).catch((e) => {\n error.addIssue(makeArgsIssue(args, e));\n throw error;\n });\n const result = await Reflect.apply(fn, this, parsedArgs);\n const parsedReturns = await me._def.returns._def.type.parseAsync(result, params).catch((e) => {\n error.addIssue(makeReturnsIssue(result, e));\n throw error;\n });\n return parsedReturns;\n });\n } else {\n const me = this;\n return OK(function(...args) {\n const parsedArgs = me._def.args.safeParse(args, params);\n if (!parsedArgs.success) {\n throw new ZodError([makeArgsIssue(args, parsedArgs.error)]);\n }\n const result = Reflect.apply(fn, this, parsedArgs.data);\n const parsedReturns = me._def.returns.safeParse(result, params);\n if (!parsedReturns.success) {\n throw new ZodError([makeReturnsIssue(result, parsedReturns.error)]);\n }\n return parsedReturns.data;\n });\n }\n }\n parameters() {\n return this._def.args;\n }\n returnType() {\n return this._def.returns;\n }\n args(...items) {\n return new ZodFunction({\n ...this._def,\n args: ZodTuple.create(items).rest(ZodUnknown.create())\n });\n }\n returns(returnType) {\n return new ZodFunction({\n ...this._def,\n returns: returnType\n });\n }\n implement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n strictImplement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n static create(args, returns, params) {\n return new ZodFunction({\n args: args ? 
args : ZodTuple.create([]).rest(ZodUnknown.create()),\n returns: returns || ZodUnknown.create(),\n typeName: ZodFirstPartyTypeKind.ZodFunction,\n ...processCreateParams(params)\n });\n }\n }\n class ZodLazy extends ZodType {\n get schema() {\n return this._def.getter();\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const lazySchema = this._def.getter();\n return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx });\n }\n }\n ZodLazy.create = (getter, params) => {\n return new ZodLazy({\n getter,\n typeName: ZodFirstPartyTypeKind.ZodLazy,\n ...processCreateParams(params)\n });\n };\n class ZodLiteral extends ZodType {\n _parse(input) {\n if (input.data !== this._def.value) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_literal,\n expected: this._def.value\n });\n return INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n get value() {\n return this._def.value;\n }\n }\n ZodLiteral.create = (value, params) => {\n return new ZodLiteral({\n value,\n typeName: ZodFirstPartyTypeKind.ZodLiteral,\n ...processCreateParams(params)\n });\n };\n function createZodEnum(values, params) {\n return new ZodEnum({\n values,\n typeName: ZodFirstPartyTypeKind.ZodEnum,\n ...processCreateParams(params)\n });\n }\n class ZodEnum extends ZodType {\n constructor() {\n super(...arguments);\n _ZodEnum_cache.set(this, void 0);\n }\n _parse(input) {\n if (typeof input.data !== \"string\") {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n addIssueToContext(ctx, {\n expected: util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodIssueCode.invalid_type\n });\n return INVALID;\n }\n if (!__classPrivateFieldGet(this, _ZodEnum_cache)) {\n __classPrivateFieldSet(this, _ZodEnum_cache, new Set(this._def.values));\n }\n if (!__classPrivateFieldGet(this, _ZodEnum_cache).has(input.data)) {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_enum_value,\n options: expectedValues\n });\n return INVALID;\n }\n return OK(input.data);\n }\n get options() {\n return this._def.values;\n }\n get enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Values() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n extract(values, newDef = this._def) {\n return ZodEnum.create(values, {\n ...this._def,\n ...newDef\n });\n }\n exclude(values, newDef = this._def) {\n return ZodEnum.create(this.options.filter((opt) => !values.includes(opt)), {\n ...this._def,\n ...newDef\n });\n }\n }\n _ZodEnum_cache = /* @__PURE__ */ new WeakMap();\n ZodEnum.create = createZodEnum;\n class ZodNativeEnum extends ZodType {\n constructor() {\n super(...arguments);\n _ZodNativeEnum_cache.set(this, void 0);\n }\n _parse(input) {\n const nativeEnumValues = util.getValidEnumValues(this._def.values);\n const ctx = this._getOrReturnCtx(input);\n if (ctx.parsedType !== ZodParsedType.string && ctx.parsedType !== ZodParsedType.number) {\n const expectedValues = util.objectValues(nativeEnumValues);\n addIssueToContext(ctx, {\n expected: util.joinValues(expectedValues),\n received: 
ctx.parsedType,\n code: ZodIssueCode.invalid_type\n });\n return INVALID;\n }\n if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache)) {\n __classPrivateFieldSet(this, _ZodNativeEnum_cache, new Set(util.getValidEnumValues(this._def.values)));\n }\n if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache).has(input.data)) {\n const expectedValues = util.objectValues(nativeEnumValues);\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_enum_value,\n options: expectedValues\n });\n return INVALID;\n }\n return OK(input.data);\n }\n get enum() {\n return this._def.values;\n }\n }\n _ZodNativeEnum_cache = /* @__PURE__ */ new WeakMap();\n ZodNativeEnum.create = (values, params) => {\n return new ZodNativeEnum({\n values,\n typeName: ZodFirstPartyTypeKind.ZodNativeEnum,\n ...processCreateParams(params)\n });\n };\n class ZodPromise extends ZodType {\n unwrap() {\n return this._def.type;\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.promise && ctx.common.async === false) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.promise,\n received: ctx.parsedType\n });\n return INVALID;\n }\n const promisified = ctx.parsedType === ZodParsedType.promise ? ctx.data : Promise.resolve(ctx.data);\n return OK(promisified.then((data) => {\n return this._def.type.parseAsync(data, {\n path: ctx.path,\n errorMap: ctx.common.contextualErrorMap\n });\n }));\n }\n }\n ZodPromise.create = (schema, params) => {\n return new ZodPromise({\n type: schema,\n typeName: ZodFirstPartyTypeKind.ZodPromise,\n ...processCreateParams(params)\n });\n };\n class ZodEffects extends ZodType {\n innerType() {\n return this._def.schema;\n }\n sourceType() {\n return this._def.schema._def.typeName === ZodFirstPartyTypeKind.ZodEffects ? this._def.schema.sourceType() : this._def.schema;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const effect = this._def.effect || null;\n const checkCtx = {\n addIssue: (arg) => {\n addIssueToContext(ctx, arg);\n if (arg.fatal) {\n status.abort();\n } else {\n status.dirty();\n }\n },\n get path() {\n return ctx.path;\n }\n };\n checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx);\n if (effect.type === \"preprocess\") {\n const processed = effect.transform(ctx.data, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(processed).then(async (processed2) => {\n if (status.value === \"aborted\")\n return INVALID;\n const result = await this._def.schema._parseAsync({\n data: processed2,\n path: ctx.path,\n parent: ctx\n });\n if (result.status === \"aborted\")\n return INVALID;\n if (result.status === \"dirty\")\n return DIRTY(result.value);\n if (status.value === \"dirty\")\n return DIRTY(result.value);\n return result;\n });\n } else {\n if (status.value === \"aborted\")\n return INVALID;\n const result = this._def.schema._parseSync({\n data: processed,\n path: ctx.path,\n parent: ctx\n });\n if (result.status === \"aborted\")\n return INVALID;\n if (result.status === \"dirty\")\n return DIRTY(result.value);\n if (status.value === \"dirty\")\n return DIRTY(result.value);\n return result;\n }\n }\n if (effect.type === \"refinement\") {\n const executeRefinement = (acc) => {\n const result = effect.refinement(acc, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(result);\n }\n if (result instanceof Promise) {\n throw new Error(\"Async refinement encountered during synchronous parse operation. 
Use .parseAsync instead.\");\n }\n return acc;\n };\n if (ctx.common.async === false) {\n const inner = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n if (inner.status === \"aborted\")\n return INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n executeRefinement(inner.value);\n return { status: status.value, value: inner.value };\n } else {\n return this._def.schema._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }).then((inner) => {\n if (inner.status === \"aborted\")\n return INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n return executeRefinement(inner.value).then(() => {\n return { status: status.value, value: inner.value };\n });\n });\n }\n }\n if (effect.type === \"transform\") {\n if (ctx.common.async === false) {\n const base = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n if (!isValid(base))\n return base;\n const result = effect.transform(base.value, checkCtx);\n if (result instanceof Promise) {\n throw new Error(`Asynchronous transform encountered during synchronous parse operation. Use .parseAsync instead.`);\n }\n return { status: status.value, value: result };\n } else {\n return this._def.schema._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }).then((base) => {\n if (!isValid(base))\n return base;\n return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result }));\n });\n }\n }\n util.assertNever(effect);\n }\n }\n ZodEffects.create = (schema, effect, params) => {\n return new ZodEffects({\n schema,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect,\n ...processCreateParams(params)\n });\n };\n ZodEffects.createWithPreprocess = (preprocess, schema, params) => {\n return new ZodEffects({\n schema,\n effect: { type: \"preprocess\", transform: preprocess },\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n ...processCreateParams(params)\n });\n };\n class ZodOptional extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === ZodParsedType.undefined) {\n return OK(void 0);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodOptional.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params)\n });\n };\n class ZodNullable extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === ZodParsedType.null) {\n return OK(null);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodNullable.create = (type, params) => {\n return new ZodNullable({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodNullable,\n ...processCreateParams(params)\n });\n };\n class ZodDefault extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n let data = ctx.data;\n if (ctx.parsedType === ZodParsedType.undefined) {\n data = this._def.defaultValue();\n }\n return this._def.innerType._parse({\n data,\n path: ctx.path,\n parent: ctx\n });\n }\n removeDefault() {\n return this._def.innerType;\n }\n }\n ZodDefault.create = (type, params) => {\n return new ZodDefault({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodDefault,\n defaultValue: typeof params.default === \"function\" ? 
params.default : () => params.default,\n ...processCreateParams(params)\n });\n };\n class ZodCatch extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const newCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: []\n }\n };\n const result = this._def.innerType._parse({\n data: newCtx.data,\n path: newCtx.path,\n parent: {\n ...newCtx\n }\n });\n if (isAsync(result)) {\n return result.then((result2) => {\n return {\n status: \"valid\",\n value: result2.status === \"valid\" ? result2.value : this._def.catchValue({\n get error() {\n return new ZodError(newCtx.common.issues);\n },\n input: newCtx.data\n })\n };\n });\n } else {\n return {\n status: \"valid\",\n value: result.status === \"valid\" ? result.value : this._def.catchValue({\n get error() {\n return new ZodError(newCtx.common.issues);\n },\n input: newCtx.data\n })\n };\n }\n }\n removeCatch() {\n return this._def.innerType;\n }\n }\n ZodCatch.create = (type, params) => {\n return new ZodCatch({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodCatch,\n catchValue: typeof params.catch === \"function\" ? params.catch : () => params.catch,\n ...processCreateParams(params)\n });\n };\n class ZodNaN extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.nan) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.nan,\n received: ctx.parsedType\n });\n return INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n }\n ZodNaN.create = (params) => {\n return new ZodNaN({\n typeName: ZodFirstPartyTypeKind.ZodNaN,\n ...processCreateParams(params)\n });\n };\n const BRAND = Symbol(\"zod_brand\");\n class ZodBranded extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const data = ctx.data;\n return this._def.type._parse({\n data,\n path: ctx.path,\n parent: ctx\n });\n }\n unwrap() {\n return this._def.type;\n }\n }\n class ZodPipeline extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.common.async) {\n const handleAsync = async () => {\n const inResult = await this._def.in._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n if (inResult.status === \"aborted\")\n return INVALID;\n if (inResult.status === \"dirty\") {\n status.dirty();\n return DIRTY(inResult.value);\n } else {\n return this._def.out._parseAsync({\n data: inResult.value,\n path: ctx.path,\n parent: ctx\n });\n }\n };\n return handleAsync();\n } else {\n const inResult = this._def.in._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx\n });\n if (inResult.status === \"aborted\")\n return INVALID;\n if (inResult.status === \"dirty\") {\n status.dirty();\n return {\n status: \"dirty\",\n value: inResult.value\n };\n } else {\n return this._def.out._parseSync({\n data: inResult.value,\n path: ctx.path,\n parent: ctx\n });\n }\n }\n }\n static create(a, b) {\n return new ZodPipeline({\n in: a,\n out: b,\n typeName: ZodFirstPartyTypeKind.ZodPipeline\n });\n }\n }\n class ZodReadonly extends ZodType {\n _parse(input) {\n const result = this._def.innerType._parse(input);\n const freeze = (data) => {\n if (isValid(data)) {\n data.value = Object.freeze(data.value);\n }\n return data;\n };\n return isAsync(result) ? 
result.then((data) => freeze(data)) : freeze(result);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodReadonly.create = (type, params) => {\n return new ZodReadonly({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodReadonly,\n ...processCreateParams(params)\n });\n };\n function custom(check, params = {}, fatal) {\n if (check)\n return ZodAny.create().superRefine((data, ctx) => {\n var _a, _b;\n if (!check(data)) {\n const p = typeof params === \"function\" ? params(data) : typeof params === \"string\" ? { message: params } : params;\n const _fatal = (_b = (_a = p.fatal) !== null && _a !== void 0 ? _a : fatal) !== null && _b !== void 0 ? _b : true;\n const p2 = typeof p === \"string\" ? { message: p } : p;\n ctx.addIssue({ code: \"custom\", ...p2, fatal: _fatal });\n }\n });\n return ZodAny.create();\n }\n const late = {\n object: ZodObject.lazycreate\n };\n var ZodFirstPartyTypeKind;\n (function(ZodFirstPartyTypeKind2) {\n ZodFirstPartyTypeKind2[\"ZodString\"] = \"ZodString\";\n ZodFirstPartyTypeKind2[\"ZodNumber\"] = \"ZodNumber\";\n ZodFirstPartyTypeKind2[\"ZodNaN\"] = \"ZodNaN\";\n ZodFirstPartyTypeKind2[\"ZodBigInt\"] = \"ZodBigInt\";\n ZodFirstPartyTypeKind2[\"ZodBoolean\"] = \"ZodBoolean\";\n ZodFirstPartyTypeKind2[\"ZodDate\"] = \"ZodDate\";\n ZodFirstPartyTypeKind2[\"ZodSymbol\"] = \"ZodSymbol\";\n ZodFirstPartyTypeKind2[\"ZodUndefined\"] = \"ZodUndefined\";\n ZodFirstPartyTypeKind2[\"ZodNull\"] = \"ZodNull\";\n ZodFirstPartyTypeKind2[\"ZodAny\"] = \"ZodAny\";\n ZodFirstPartyTypeKind2[\"ZodUnknown\"] = \"ZodUnknown\";\n ZodFirstPartyTypeKind2[\"ZodNever\"] = \"ZodNever\";\n ZodFirstPartyTypeKind2[\"ZodVoid\"] = \"ZodVoid\";\n ZodFirstPartyTypeKind2[\"ZodArray\"] = \"ZodArray\";\n ZodFirstPartyTypeKind2[\"ZodObject\"] = \"ZodObject\";\n ZodFirstPartyTypeKind2[\"ZodUnion\"] = \"ZodUnion\";\n ZodFirstPartyTypeKind2[\"ZodDiscriminatedUnion\"] = \"ZodDiscriminatedUnion\";\n ZodFirstPartyTypeKind2[\"ZodIntersection\"] = \"ZodIntersection\";\n ZodFirstPartyTypeKind2[\"ZodTuple\"] = \"ZodTuple\";\n ZodFirstPartyTypeKind2[\"ZodRecord\"] = \"ZodRecord\";\n ZodFirstPartyTypeKind2[\"ZodMap\"] = \"ZodMap\";\n ZodFirstPartyTypeKind2[\"ZodSet\"] = \"ZodSet\";\n ZodFirstPartyTypeKind2[\"ZodFunction\"] = \"ZodFunction\";\n ZodFirstPartyTypeKind2[\"ZodLazy\"] = \"ZodLazy\";\n ZodFirstPartyTypeKind2[\"ZodLiteral\"] = \"ZodLiteral\";\n ZodFirstPartyTypeKind2[\"ZodEnum\"] = \"ZodEnum\";\n ZodFirstPartyTypeKind2[\"ZodEffects\"] = \"ZodEffects\";\n ZodFirstPartyTypeKind2[\"ZodNativeEnum\"] = \"ZodNativeEnum\";\n ZodFirstPartyTypeKind2[\"ZodOptional\"] = \"ZodOptional\";\n ZodFirstPartyTypeKind2[\"ZodNullable\"] = \"ZodNullable\";\n ZodFirstPartyTypeKind2[\"ZodDefault\"] = \"ZodDefault\";\n ZodFirstPartyTypeKind2[\"ZodCatch\"] = \"ZodCatch\";\n ZodFirstPartyTypeKind2[\"ZodPromise\"] = \"ZodPromise\";\n ZodFirstPartyTypeKind2[\"ZodBranded\"] = \"ZodBranded\";\n ZodFirstPartyTypeKind2[\"ZodPipeline\"] = \"ZodPipeline\";\n ZodFirstPartyTypeKind2[\"ZodReadonly\"] = \"ZodReadonly\";\n })(ZodFirstPartyTypeKind || (ZodFirstPartyTypeKind = {}));\n const instanceOfType = (cls, params = {\n message: `Input not instance of ${cls.name}`\n }) => custom((data) => data instanceof cls, params);\n const stringType = ZodString.create;\n const numberType = ZodNumber.create;\n const nanType = ZodNaN.create;\n const bigIntType = ZodBigInt.create;\n const booleanType = ZodBoolean.create;\n const dateType = ZodDate.create;\n const symbolType = ZodSymbol.create;\n const undefinedType = ZodUndefined.create;\n 
const nullType = ZodNull.create;\n const anyType = ZodAny.create;\n const unknownType = ZodUnknown.create;\n const neverType = ZodNever.create;\n const voidType = ZodVoid.create;\n const arrayType = ZodArray.create;\n const objectType = ZodObject.create;\n const strictObjectType = ZodObject.strictCreate;\n const unionType = ZodUnion.create;\n const discriminatedUnionType = ZodDiscriminatedUnion.create;\n const intersectionType = ZodIntersection.create;\n const tupleType = ZodTuple.create;\n const recordType = ZodRecord.create;\n const mapType = ZodMap.create;\n const setType = ZodSet.create;\n const functionType = ZodFunction.create;\n const lazyType = ZodLazy.create;\n const literalType = ZodLiteral.create;\n const enumType = ZodEnum.create;\n const nativeEnumType = ZodNativeEnum.create;\n const promiseType = ZodPromise.create;\n const effectsType = ZodEffects.create;\n const optionalType = ZodOptional.create;\n const nullableType = ZodNullable.create;\n const preprocessType = ZodEffects.createWithPreprocess;\n const pipelineType = ZodPipeline.create;\n const ostring = () => stringType().optional();\n const onumber = () => numberType().optional();\n const oboolean = () => booleanType().optional();\n const coerce = {\n string: (arg) => ZodString.create({ ...arg, coerce: true }),\n number: (arg) => ZodNumber.create({ ...arg, coerce: true }),\n boolean: (arg) => ZodBoolean.create({\n ...arg,\n coerce: true\n }),\n bigint: (arg) => ZodBigInt.create({ ...arg, coerce: true }),\n date: (arg) => ZodDate.create({ ...arg, coerce: true })\n };\n const NEVER = INVALID;\n var z = /* @__PURE__ */ Object.freeze({\n __proto__: null,\n defaultErrorMap: errorMap,\n setErrorMap,\n getErrorMap,\n makeIssue,\n EMPTY_PATH,\n addIssueToContext,\n ParseStatus,\n INVALID,\n DIRTY,\n OK,\n isAborted,\n isDirty,\n isValid,\n isAsync,\n get util() {\n return util;\n },\n get objectUtil() {\n return objectUtil;\n },\n ZodParsedType,\n getParsedType,\n ZodType,\n datetimeRegex,\n ZodString,\n ZodNumber,\n ZodBigInt,\n ZodBoolean,\n ZodDate,\n ZodSymbol,\n ZodUndefined,\n ZodNull,\n ZodAny,\n ZodUnknown,\n ZodNever,\n ZodVoid,\n ZodArray,\n ZodObject,\n ZodUnion,\n ZodDiscriminatedUnion,\n ZodIntersection,\n ZodTuple,\n ZodRecord,\n ZodMap,\n ZodSet,\n ZodFunction,\n ZodLazy,\n ZodLiteral,\n ZodEnum,\n ZodNativeEnum,\n ZodPromise,\n ZodEffects,\n ZodTransformer: ZodEffects,\n ZodOptional,\n ZodNullable,\n ZodDefault,\n ZodCatch,\n ZodNaN,\n BRAND,\n ZodBranded,\n ZodPipeline,\n ZodReadonly,\n custom,\n Schema: ZodType,\n ZodSchema: ZodType,\n late,\n get ZodFirstPartyTypeKind() {\n return ZodFirstPartyTypeKind;\n },\n coerce,\n any: anyType,\n array: arrayType,\n bigint: bigIntType,\n boolean: booleanType,\n date: dateType,\n discriminatedUnion: discriminatedUnionType,\n effect: effectsType,\n \"enum\": enumType,\n \"function\": functionType,\n \"instanceof\": instanceOfType,\n intersection: intersectionType,\n lazy: lazyType,\n literal: literalType,\n map: mapType,\n nan: nanType,\n nativeEnum: nativeEnumType,\n never: neverType,\n \"null\": nullType,\n nullable: nullableType,\n number: numberType,\n object: objectType,\n oboolean,\n onumber,\n optional: optionalType,\n ostring,\n pipeline: pipelineType,\n preprocess: preprocessType,\n promise: promiseType,\n record: recordType,\n set: setType,\n strictObject: strictObjectType,\n string: stringType,\n symbol: symbolType,\n transformer: effectsType,\n tuple: tupleType,\n \"undefined\": undefinedType,\n union: unionType,\n unknown: unknownType,\n \"void\": voidType,\n 
NEVER,\n ZodIssueCode,\n quotelessJson,\n ZodError\n });\n z.object({\n type: z.literal(\"plain\"),\n content: z.string()\n });\n z.object({\n /** Included left border. */\n from: z.number().min(0),\n /** Excluded right border. */\n to: z.number().min(1)\n });\n function assertNever(x) {\n throw new Error(\"Unexpected object: \" + x);\n }\n function isDataInfo(value) {\n if (!value || typeof value !== \"object\") {\n return false;\n }\n const data = value;\n if (!(\"type\" in data)) {\n return false;\n }\n switch (data.type) {\n case \"Json\":\n return typeof data.keyLength === \"number\" && data.data !== void 0 && typeof data.data === \"object\";\n case \"JsonPartitioned\":\n case \"BinaryPartitioned\":\n case \"ParquetPartitioned\":\n return typeof data.partitionKeyLength === \"number\" && data.parts !== void 0 && typeof data.parts === \"object\";\n default:\n return false;\n }\n }\n function mapDataInfo(dataInfo, mapFn) {\n if (dataInfo === void 0) {\n return void 0;\n }\n switch (dataInfo.type) {\n case \"Json\":\n return dataInfo;\n case \"JsonPartitioned\": {\n const newParts = {};\n for (const [key, blob] of Object.entries(dataInfo.parts)) {\n newParts[key] = mapFn(blob);\n }\n return {\n ...dataInfo,\n parts: newParts\n };\n }\n case \"BinaryPartitioned\": {\n const newParts = {};\n for (const [key, chunk] of Object.entries(dataInfo.parts)) {\n newParts[key] = {\n index: mapFn(chunk.index),\n values: mapFn(chunk.values)\n };\n }\n return {\n ...dataInfo,\n parts: newParts\n };\n }\n case \"ParquetPartitioned\": {\n const newParts = {};\n for (const [key, blob] of Object.entries(dataInfo.parts)) {\n newParts[key] = mapFn(blob);\n }\n return {\n ...dataInfo,\n parts: newParts\n };\n }\n }\n }\n function visitDataInfo(dataInfo, cb) {\n switch (dataInfo.type) {\n case \"Json\":\n break;\n case \"JsonPartitioned\": {\n Object.values(dataInfo.parts).forEach(cb);\n break;\n }\n case \"BinaryPartitioned\": {\n Object.values(dataInfo.parts).forEach((chunk) => {\n cb(chunk.index);\n cb(chunk.values);\n });\n break;\n }\n case \"ParquetPartitioned\": {\n Object.values(dataInfo.parts).forEach(cb);\n break;\n }\n }\n }\n function isDataInfoEntries(value) {\n if (!value || typeof value !== \"object\") {\n return false;\n }\n const data = value;\n if (!(\"type\" in data)) {\n return false;\n }\n switch (data.type) {\n case \"Json\":\n return typeof data.keyLength === \"number\" && Array.isArray(data.data);\n case \"JsonPartitioned\":\n case \"BinaryPartitioned\":\n case \"ParquetPartitioned\":\n return typeof data.partitionKeyLength === \"number\" && Array.isArray(data.parts);\n default:\n return false;\n }\n }\n function isPartitionedDataInfoEntries(value) {\n if (!isDataInfoEntries(value))\n return false;\n switch (value.type) {\n case \"JsonPartitioned\":\n case \"BinaryPartitioned\":\n case \"ParquetPartitioned\":\n return true;\n default:\n return false;\n }\n }\n function dataInfoToEntries(dataInfo) {\n switch (dataInfo.type) {\n case \"Json\":\n return {\n type: \"Json\",\n keyLength: dataInfo.keyLength,\n data: Object.entries(dataInfo.data).map(([keyStr, value]) => {\n const key = JSON.parse(keyStr);\n return { key, value };\n })\n };\n case \"JsonPartitioned\":\n return {\n type: \"JsonPartitioned\",\n partitionKeyLength: dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {\n const key = JSON.parse(keyStr);\n return { key, value: blob };\n })\n };\n case \"BinaryPartitioned\":\n return {\n type: \"BinaryPartitioned\",\n partitionKeyLength: 
dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, chunk]) => {\n const key = JSON.parse(keyStr);\n return { key, value: chunk };\n })\n };\n case \"ParquetPartitioned\":\n return {\n type: \"ParquetPartitioned\",\n partitionKeyLength: dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {\n const key = JSON.parse(keyStr);\n return { key, value: blob };\n })\n };\n default:\n assertNever(dataInfo);\n }\n }\n function entriesToDataInfo(dataInfoEntries) {\n switch (dataInfoEntries.type) {\n case \"Json\":\n return {\n type: \"Json\",\n keyLength: dataInfoEntries.keyLength,\n data: Object.fromEntries(dataInfoEntries.data.map(({ key, value }) => [JSON.stringify(key), value]))\n };\n case \"JsonPartitioned\":\n return {\n type: \"JsonPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value]))\n };\n case \"BinaryPartitioned\":\n return {\n type: \"BinaryPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value]))\n };\n case \"ParquetPartitioned\":\n return {\n type: \"ParquetPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value]))\n };\n default:\n assertNever(dataInfoEntries);\n }\n }\n function stringifyValue(value) {\n if (typeof value === \"string\") {\n return `String value was thrown: ${value}`;\n }\n if (value && typeof value === \"object\") {\n try {\n return `Plain object was thrown: ${JSON.stringify(value)}`;\n } catch (jsonError) {\n const errorMessage = jsonError instanceof Error ? jsonError.message : String(jsonError);\n return `Non-serializable object was thrown (JSON.stringify failed: ${errorMessage}): ${String(value)}`;\n }\n }\n return String(`Non-Error value (${typeof value}) was thrown: ${value}`);\n }\n function ensureError(value) {\n if (value instanceof Error) {\n return value;\n }\n return new Error(stringifyValue(value));\n }\n function getDefaultExportFromCjs(x) {\n return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, \"default\") ? x[\"default\"] : x;\n }\n var canonicalize$1;\n var hasRequiredCanonicalize;\n function requireCanonicalize() {\n if (hasRequiredCanonicalize) return canonicalize$1;\n hasRequiredCanonicalize = 1;\n canonicalize$1 = function serialize(object) {\n if (typeof object === \"number\" && isNaN(object)) {\n throw new Error(\"NaN is not allowed\");\n }\n if (typeof object === \"number\" && !isFinite(object)) {\n throw new Error(\"Infinity is not allowed\");\n }\n if (object === null || typeof object !== \"object\") {\n return JSON.stringify(object);\n }\n if (object.toJSON instanceof Function) {\n return serialize(object.toJSON());\n }\n if (Array.isArray(object)) {\n const values2 = object.reduce((t, cv, ci) => {\n const comma = ci === 0 ? \"\" : \",\";\n const value = cv === void 0 || typeof cv === \"symbol\" ? null : cv;\n return `${t}${comma}${serialize(value)}`;\n }, \"\");\n return `[${values2}]`;\n }\n const values = Object.keys(object).sort().reduce((t, cv) => {\n if (object[cv] === void 0 || typeof object[cv] === \"symbol\") {\n return t;\n }\n const comma = t.length === 0 ? 
\"\" : \",\";\n return `${t}${comma}${serialize(cv)}:${serialize(object[cv])}`;\n }, \"\");\n return `{${values}}`;\n };\n return canonicalize$1;\n }\n var canonicalizeExports = requireCanonicalize();\n const canonicalize = /* @__PURE__ */ getDefaultExportFromCjs(canonicalizeExports);\n function stringifyJson(value) {\n return JSON.stringify(value);\n }\n function canonicalizeJson(value) {\n return canonicalize(value);\n }\n function parseJson(value) {\n return JSON.parse(value);\n }\n function readMetadata(metadata, key) {\n return metadata == null ? void 0 : metadata[key];\n }\n function readMetadataJsonOrThrow(metadata, metadataJson, key, methodNameInError = \"readMetadataJsonOrThrow\") {\n const json = readMetadata(metadata, key);\n if (json === void 0)\n return void 0;\n const schema = metadataJson[key];\n try {\n const value = JSON.parse(json);\n return schema.parse(value);\n } catch (error) {\n throw new Error(`${methodNameInError} failed, key: ${String(key)}, value: ${json}, error: ${ensureError(error)}`);\n }\n }\n function readMetadataJson(metadata, metadataJson, key) {\n try {\n return readMetadataJsonOrThrow(metadata, metadataJson, key);\n } catch {\n return void 0;\n }\n }\n const Annotation = {\n DiscreteValues: \"pl7.app/discreteValues\",\n Graph: {\n Axis: {\n HighCardinality: \"pl7.app/graph/axis/highCardinality\",\n LowerLimit: \"pl7.app/graph/axis/lowerLimit\",\n SymmetricRange: \"pl7.app/graph/axis/symmetricRange\",\n UpperLimit: \"pl7.app/graph/axis/upperLimit\"\n },\n IsDenseAxis: \"pl7.app/graph/isDenseAxis\",\n IsVirtual: \"pl7.app/graph/isVirtual\",\n Palette: \"pl7.app/graph/palette\",\n Thresholds: \"pl7.app/graph/thresholds\",\n TreatAbsentValuesAs: \"pl7.app/graph/treatAbsentValuesAs\"\n },\n HideDataFromUi: \"pl7.app/hideDataFromUi\",\n HideDataFromGraphs: \"pl7.app/hideDataFromGraphs\",\n IsDiscreteFilter: \"pl7.app/isDiscreteFilter\",\n IsLinkerColumn: \"pl7.app/isLinkerColumn\",\n IsSubset: \"pl7.app/isSubset\",\n Label: \"pl7.app/label\",\n Max: \"pl7.app/max\",\n Min: \"pl7.app/min\",\n MultipliesBy: \"pl7.app/multipliesBy\",\n Parents: \"pl7.app/parents\",\n Sequence: {\n Annotation: {\n Mapping: \"pl7.app/sequence/annotation/mapping\"\n },\n IsAnnotation: \"pl7.app/sequence/isAnnotation\"\n },\n Table: {\n OrderPriority: \"pl7.app/table/orderPriority\"\n },\n Trace: \"pl7.app/trace\"\n };\n const ValueTypeSchema = z.enum([\"Int\", \"Long\", \"Float\", \"Double\", \"String\"]);\n const AnnotationJson = {\n [Annotation.DiscreteValues]: z.array(z.string()).or(z.array(z.number())),\n [Annotation.Graph.Axis.HighCardinality]: z.boolean(),\n [Annotation.Graph.Axis.LowerLimit]: z.number(),\n [Annotation.Graph.Axis.UpperLimit]: z.number(),\n [Annotation.Graph.Axis.SymmetricRange]: z.boolean(),\n [Annotation.Graph.IsDenseAxis]: z.boolean(),\n [Annotation.Graph.Palette]: z.object({ mapping: z.record(z.number()), name: z.string() }),\n [Annotation.Graph.Thresholds]: z.array(z.object({\n columnId: z.object({ valueType: ValueTypeSchema, name: z.string() }),\n value: z.number()\n })),\n [Annotation.Graph.TreatAbsentValuesAs]: z.number(),\n [Annotation.Graph.IsVirtual]: z.boolean(),\n [Annotation.HideDataFromUi]: z.boolean(),\n [Annotation.HideDataFromGraphs]: z.boolean(),\n [Annotation.IsDiscreteFilter]: z.boolean(),\n [Annotation.IsLinkerColumn]: z.boolean(),\n [Annotation.IsSubset]: z.boolean(),\n [Annotation.Max]: z.number(),\n [Annotation.Min]: z.number(),\n [Annotation.MultipliesBy]: z.array(z.string()),\n [Annotation.Parents]: z.array(z.string()),\n 
[Annotation.Sequence.Annotation.Mapping]: z.record(z.string(), z.string()),\n [Annotation.Sequence.IsAnnotation]: z.boolean(),\n [Annotation.Table.OrderPriority]: z.number(),\n [Annotation.Trace]: z.record(z.string(), z.unknown())\n };\n function readAnnotation(spec, key) {\n return readMetadata(spec == null ? void 0 : spec.annotations, key);\n }\n function readAnnotationJson(spec, key) {\n return readMetadataJson(spec == null ? void 0 : spec.annotations, AnnotationJson, key);\n }\n function makeAxisTree(axis) {\n return { axis, children: [] };\n }\n function getAxesTree(rootAxis) {\n const root = makeAxisTree(rootAxis);\n let nodesQ = [root];\n while (nodesQ.length) {\n const nextNodes = [];\n for (const node of nodesQ) {\n node.children = node.axis.parentAxesSpec.map(makeAxisTree);\n nextNodes.push(...node.children);\n }\n nodesQ = nextNodes;\n }\n return root;\n }\n function getArrayFromAxisTree(tree) {\n const res = [tree.axis];\n let nodesQ = [tree];\n while (nodesQ.length) {\n const nextNodes = [];\n for (const node of nodesQ) {\n for (const parent of node.children) {\n res.push(parent.axis);\n nextNodes.push(parent);\n }\n }\n nodesQ = nextNodes;\n }\n return res;\n }\n function canonicalizeAxisWithParents(axis) {\n return canonicalizeJson(getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n }\n function normalizingAxesComparator(axis1, axis2) {\n if (axis1.name !== axis2.name) {\n return axis1.name < axis2.name ? 1 : -1;\n }\n if (axis1.type !== axis2.type) {\n return axis1.type < axis2.type ? 1 : -1;\n }\n const domain1 = canonicalizeJson(axis1.domain ?? {});\n const domain2 = canonicalizeJson(axis2.domain ?? {});\n if (domain1 !== domain2) {\n return domain1 < domain2 ? 1 : -1;\n }\n const parents1 = canonicalizeAxisWithParents(axis1);\n const parents2 = canonicalizeAxisWithParents(axis2);\n if (parents1 !== parents2) {\n return parents1 < parents2 ? 1 : -1;\n }\n const annotation1 = canonicalizeJson(axis1.annotations ?? {});\n const annotation2 = canonicalizeJson(axis2.annotations ?? {});\n if (annotation1 !== annotation2) {\n return annotation1 < annotation2 ? 
1 : -1;\n }\n return 0;\n }\n function parseParentsFromAnnotations(axis) {\n const parentsList = readAnnotationJson(axis, Annotation.Parents);\n if (parentsList === void 0) {\n return [];\n }\n return parentsList;\n }\n function sortParentsDeep(axisSpec) {\n axisSpec.parentAxesSpec.forEach(sortParentsDeep);\n axisSpec.parentAxesSpec.sort(normalizingAxesComparator);\n }\n function hasCycleOfParents(axisSpec) {\n const root = makeAxisTree(axisSpec);\n let nodesQ = [root];\n const ancestors = new Set(canonicalizeJson(getAxisId(axisSpec)));\n while (nodesQ.length) {\n const nextNodes = [];\n const levelIds = /* @__PURE__ */ new Set();\n for (const node of nodesQ) {\n node.children = node.axis.parentAxesSpec.map(makeAxisTree);\n for (const child of node.children) {\n const childId = canonicalizeJson(getAxisId(child.axis));\n if (!levelIds.has(childId)) {\n nextNodes.push(child);\n levelIds.add(childId);\n if (ancestors.has(childId)) {\n return true;\n }\n ancestors.add(childId);\n }\n }\n }\n nodesQ = nextNodes;\n }\n return false;\n }\n function getNormalizedAxesList(axes) {\n if (!axes.length) {\n return [];\n }\n const modifiedAxes = axes.map((axis) => {\n const { parentAxes: _, ...copiedRest } = axis;\n return { ...copiedRest, annotations: { ...copiedRest.annotations }, parentAxesSpec: [] };\n });\n axes.forEach((axis, idx) => {\n var _a;\n const modifiedAxis = modifiedAxes[idx];\n if (axis.parentAxes) {\n modifiedAxis.parentAxesSpec = axis.parentAxes.map((idx2) => modifiedAxes[idx2]);\n } else {\n const parents = parseParentsFromAnnotations(axis).map((name) => modifiedAxes.find((axis2) => axis2.name === name));\n modifiedAxis.parentAxesSpec = parents.some((p) => p === void 0) ? [] : parents;\n (_a = modifiedAxis.annotations) == null ? true : delete _a[Annotation.Parents];\n }\n });\n if (modifiedAxes.some(hasCycleOfParents)) {\n modifiedAxes.forEach((axis) => {\n axis.parentAxesSpec = [];\n });\n } else {\n modifiedAxes.forEach((axis) => {\n sortParentsDeep(axis);\n });\n }\n return modifiedAxes;\n }\n const PColumnName = {\n Label: \"pl7.app/label\"\n };\n function getColumnIdAndSpec(column) {\n return {\n columnId: column.id,\n spec: column.spec\n };\n }\n function getAxisId(spec) {\n const { type, name, domain } = spec;\n const result = { type, name };\n if (domain && Object.entries(domain).length > 0) {\n Object.assign(result, { domain });\n }\n return result;\n }\n function getAxesId(spec) {\n return spec.map(getAxisId);\n }\n function canonicalizeAxisId(id) {\n return canonicalizeJson(getAxisId(id));\n }\n function matchDomain$1(query, target) {\n if (query === void 0)\n return target === void 0;\n if (target === void 0)\n return true;\n for (const k in target) {\n if (query[k] !== target[k])\n return false;\n }\n return true;\n }\n function matchAxisId(query, target) {\n return query.name === target.name && matchDomain$1(query.domain, target.domain);\n }\n function mapPTableDef(def, cb) {\n return { ...def, src: mapJoinEntry(def.src, cb) };\n }\n function mapJoinEntry(entry, cb) {\n switch (entry.type) {\n case \"column\":\n return {\n type: \"column\",\n column: cb(entry.column)\n };\n case \"slicedColumn\":\n return {\n type: \"slicedColumn\",\n column: cb(entry.column),\n newId: entry.newId,\n axisFilters: entry.axisFilters\n };\n case \"artificialColumn\":\n return {\n type: \"artificialColumn\",\n column: cb(entry.column),\n newId: entry.newId,\n axesIndices: entry.axesIndices\n };\n case \"inlineColumn\":\n return entry;\n case \"inner\":\n case \"full\":\n return {\n type: 
entry.type,\n entries: entry.entries.map((col) => mapJoinEntry(col, cb))\n };\n case \"outer\":\n return {\n type: \"outer\",\n primary: mapJoinEntry(entry.primary, cb),\n secondary: entry.secondary.map((col) => mapJoinEntry(col, cb))\n };\n default:\n assertNever(entry);\n }\n }\n function stringifyColumnId(id) {\n return canonicalize(id);\n }\n function axisKey(axis) {\n return canonicalize(getAxisId(axis));\n }\n function domainKey(key, value) {\n return JSON.stringify([key, value]);\n }\n class AnchoredIdDeriver {\n /**\n * Creates a new anchor context from a set of anchor column specifications\n * @param anchors Record of anchor column specifications indexed by anchor ID\n */\n constructor(anchors) {\n __publicField(this, \"anchors\");\n __publicField(this, \"domains\", /* @__PURE__ */ new Map());\n __publicField(this, \"axes\", /* @__PURE__ */ new Map());\n /**\n * Domain packs are used to group domain keys that can be anchored to the same anchor\n * This is used to optimize the lookup of domain anchors\n */\n __publicField(this, \"domainPacks\", []);\n /**\n * Maps domain packs to anchors\n */\n __publicField(this, \"domainPackToAnchor\", /* @__PURE__ */ new Map());\n this.anchors = anchors;\n const anchorEntries = Object.entries(anchors);\n anchorEntries.sort((a, b) => a[0].localeCompare(b[0]));\n for (const [anchorId, spec] of anchorEntries) {\n for (let axisIdx = 0; axisIdx < spec.axesSpec.length; axisIdx++) {\n const axis = spec.axesSpec[axisIdx];\n const key = axisKey(axis);\n this.axes.set(key, { anchor: anchorId, idx: axisIdx });\n }\n if (spec.domain !== void 0) {\n const domainEntries = Object.entries(spec.domain);\n domainEntries.sort((a, b) => a[0].localeCompare(b[0]));\n this.domainPackToAnchor.set(JSON.stringify(domainEntries), anchorId);\n this.domainPacks.push(domainEntries.map(([dKey]) => dKey));\n for (const [dKey, dValue] of domainEntries) {\n const key = domainKey(dKey, dValue);\n this.domains.set(key, anchorId);\n }\n }\n }\n }\n /**\n * Implementation of derive method\n */\n derive(spec, axisFilters) {\n const result = {\n name: spec.name,\n axes: []\n };\n let skipDomains = void 0;\n if (spec.domain !== void 0) {\n outer: for (const domainPack of this.domainPacks) {\n const dAnchor = [];\n for (const domainKey2 of domainPack) {\n const dValue = spec.domain[domainKey2];\n if (dValue !== void 0)\n dAnchor.push([domainKey2, dValue]);\n else\n break outer;\n }\n const domainAnchor = this.domainPackToAnchor.get(JSON.stringify(dAnchor));\n if (domainAnchor !== void 0) {\n result.domainAnchor = domainAnchor;\n skipDomains = new Set(domainPack);\n break;\n }\n }\n }\n for (const [dKey, dValue] of Object.entries(spec.domain ?? {})) {\n if (skipDomains !== void 0 && skipDomains.has(dKey))\n continue;\n const key = domainKey(dKey, dValue);\n const anchorId = this.domains.get(key);\n result.domain ?? (result.domain = {});\n result.domain[dKey] = anchorId ? 
{ anchor: anchorId } : dValue;\n }\n result.axes = spec.axesSpec.map((axis) => {\n const key = axisKey(axis);\n const anchorAxisRef = this.axes.get(key);\n if (anchorAxisRef === void 0)\n return getAxisId(axis);\n else\n return anchorAxisRef;\n });\n if (!axisFilters || axisFilters.length === 0) {\n return result;\n }\n const resolvedFilters = [];\n for (const filter of axisFilters) {\n const [axisIdOrIndex, value] = filter;\n if (typeof axisIdOrIndex === \"number\") {\n if (axisIdOrIndex < 0 || axisIdOrIndex >= spec.axesSpec.length) {\n throw new Error(`Axis index ${axisIdOrIndex} is out of bounds (0-${spec.axesSpec.length - 1})`);\n }\n resolvedFilters.push([axisIdOrIndex, value]);\n } else {\n const axisIndex = spec.axesSpec.findIndex((axis) => axis.name === axisIdOrIndex);\n if (axisIndex === -1) {\n throw new Error(`Axis with name \"${axisIdOrIndex}\" not found in the column specification`);\n }\n resolvedFilters.push([axisIndex, value]);\n }\n }\n resolvedFilters.sort((a, b) => a[0] - b[0]);\n return {\n source: result,\n axisFilters: resolvedFilters\n };\n }\n /**\n * Derives a canonicalized string representation of an anchored column identifier, can be used as a unique identifier for the column\n * @param spec Column specification to anchor\n * @param axisFilters Optional axis filters to apply to the column\n * @returns A canonicalized string representation of the anchored column identifier\n */\n deriveS(spec, axisFilters) {\n return stringifyColumnId(this.derive(spec, axisFilters));\n }\n }\n function resolveAnchors(anchors, matcher, options) {\n const result = { ...matcher };\n const ignoreMissingDomains = (options == null ? void 0 : options.ignoreMissingDomains) ?? false;\n if (result.domainAnchor !== void 0) {\n const anchorSpec = anchors[result.domainAnchor];\n if (!anchorSpec)\n throw new Error(`Anchor \"${result.domainAnchor}\" not found`);\n const anchorDomains = anchorSpec.domain || {};\n result.domain = { ...anchorDomains, ...result.domain };\n delete result.domainAnchor;\n }\n if (result.domain) {\n const resolvedDomain = {};\n for (const [key, value] of Object.entries(result.domain)) {\n if (typeof value === \"string\") {\n resolvedDomain[key] = value;\n } else {\n const anchorSpec = anchors[value.anchor];\n if (!anchorSpec)\n throw new Error(`Anchor \"${value.anchor}\" not found for domain key \"${key}\"`);\n if (!anchorSpec.domain || anchorSpec.domain[key] === void 0) {\n if (!ignoreMissingDomains)\n throw new Error(`Domain key \"${key}\" not found in anchor \"${value.anchor}\"`);\n continue;\n }\n resolvedDomain[key] = anchorSpec.domain[key];\n }\n }\n result.domain = resolvedDomain;\n }\n if (result.axes)\n result.axes = result.axes.map((axis) => resolveAxisReference(anchors, axis));\n return result;\n }\n function resolveAxisReference(anchors, axisRef) {\n if (!isAnchorAxisRef(axisRef))\n return axisRef;\n const anchorId = axisRef.anchor;\n const anchorSpec = anchors[anchorId];\n if (!anchorSpec)\n throw new Error(`Anchor \"${anchorId}\" not found for axis reference`);\n if (\"idx\" in axisRef) {\n if (axisRef.idx < 0 || axisRef.idx >= anchorSpec.axesSpec.length)\n throw new Error(`Axis index ${axisRef.idx} out of bounds for anchor \"${anchorId}\"`);\n return anchorSpec.axesSpec[axisRef.idx];\n } else if (\"name\" in axisRef) {\n const matches = anchorSpec.axesSpec.filter((axis) => axis.name === axisRef.name);\n if (matches.length > 1)\n throw new Error(`Multiple axes with name \"${axisRef.name}\" found in anchor \"${anchorId}\"`);\n if (matches.length === 0)\n 
throw new Error(`Axis with name \"${axisRef.name}\" not found in anchor \"${anchorId}\"`);\n return matches[0];\n } else if (\"id\" in axisRef) {\n const matches = anchorSpec.axesSpec.filter((axis) => matchAxisId(axisRef.id, getAxisId(axis)));\n if (matches.length > 1)\n throw new Error(`Multiple matching axes found for matcher in anchor \"${anchorId}\"`);\n if (matches.length === 0)\n throw new Error(`No matching axis found for matcher in anchor \"${anchorId}\"`);\n return matches[0];\n }\n throw new Error(`Unsupported axis reference type`);\n }\n function isAnchorAxisRef(value) {\n return typeof value === \"object\" && \"anchor\" in value;\n }\n function isPColumnSpec(spec) {\n return spec.kind === \"PColumn\";\n }\n function isPColumn(obj) {\n return isPColumnSpec(obj.spec);\n }\n function ensurePColumn(obj) {\n if (!isPColumn(obj))\n throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n }\n function mapPObjectData(pObj, cb) {\n return pObj === void 0 ? void 0 : {\n ...pObj,\n data: cb(pObj.data)\n };\n }\n function extractAllColumns(entry) {\n const columns = /* @__PURE__ */ new Map();\n const addAllColumns = (entry2) => {\n switch (entry2.type) {\n case \"column\":\n columns.set(entry2.column.id, entry2.column);\n return;\n case \"slicedColumn\":\n columns.set(entry2.column.id, entry2.column);\n return;\n case \"artificialColumn\":\n columns.set(entry2.column.id, entry2.column);\n return;\n case \"inlineColumn\":\n return;\n case \"full\":\n case \"inner\":\n for (const e of entry2.entries)\n addAllColumns(e);\n return;\n case \"outer\":\n addAllColumns(entry2.primary);\n for (const e of entry2.secondary)\n addAllColumns(e);\n return;\n default:\n assertNever(entry2);\n }\n };\n addAllColumns(entry);\n return [...columns.values()];\n }\n function matchAxis(selector, axis) {\n if (selector.name !== void 0 && selector.name !== axis.name)\n return false;\n if (selector.type !== void 0) {\n if (Array.isArray(selector.type)) {\n if (!selector.type.includes(axis.type))\n return false;\n } else if (selector.type !== axis.type) {\n return false;\n }\n }\n if (selector.domain !== void 0) {\n const axisDomain = axis.domain || {};\n for (const [key, value] of Object.entries(selector.domain))\n if (axisDomain[key] !== value)\n return false;\n }\n return true;\n }\n function matchPColumn(pcolumn, selector) {\n if (selector.name !== void 0 && pcolumn.name !== selector.name)\n return false;\n if (selector.namePattern !== void 0 && !new RegExp(selector.namePattern).test(pcolumn.name))\n return false;\n if (selector.type !== void 0) {\n if (Array.isArray(selector.type)) {\n if (!selector.type.includes(pcolumn.valueType))\n return false;\n } else if (selector.type !== pcolumn.valueType) {\n return false;\n }\n }\n if (selector.domain !== void 0) {\n const columnDomain = pcolumn.domain || {};\n for (const [key, value] of Object.entries(selector.domain))\n if (columnDomain[key] !== value)\n return false;\n }\n if (selector.axes !== void 0) {\n const pcolumnAxes = pcolumn.axesSpec.map(getAxisId);\n if (selector.partialAxesMatch) {\n for (const selectorAxis of selector.axes)\n if (!pcolumnAxes.some((columnAxis) => matchAxis(selectorAxis, columnAxis)))\n return false;\n } else {\n if (pcolumnAxes.length !== selector.axes.length)\n return false;\n for (let i = 0; i < selector.axes.length; i++)\n if (!matchAxis(selector.axes[i], pcolumnAxes[i]))\n return false;\n }\n }\n if (selector.annotations !== void 0) {\n const columnAnnotations = pcolumn.annotations || {};\n for (const [key, 
value] of Object.entries(selector.annotations))\n if (columnAnnotations[key] !== value)\n return false;\n }\n if (selector.annotationPatterns !== void 0) {\n const columnAnnotations = pcolumn.annotations || {};\n for (const [key, pattern] of Object.entries(selector.annotationPatterns)) {\n const value = columnAnnotations[key];\n if (value === void 0 || !new RegExp(pattern).test(value))\n return false;\n }\n }\n return true;\n }\n function selectorsToPredicate(predicateOrSelectors) {\n if (Array.isArray(predicateOrSelectors))\n return (spec) => predicateOrSelectors.some((selector) => isPColumnSpec(spec) && matchPColumn(spec, selector));\n else\n return (spec) => isPColumnSpec(spec) && matchPColumn(spec, predicateOrSelectors);\n }\n function deriveNativeId(spec) {\n const result = {\n kind: spec.kind,\n name: spec.name\n };\n if (spec.domain !== void 0)\n result.domain = spec.domain;\n if (isPColumnSpec(spec))\n result.axesSpec = getAxesId(spec.axesSpec);\n return canonicalize(result);\n }\n class LinkerMap {\n constructor(linkerMap) {\n /** Graph of linkers connected by axes (single or grouped by parents) */\n __publicField(this, \"data\");\n this.data = linkerMap;\n }\n get keys() {\n return this.data.keys();\n }\n get keyAxesIds() {\n return [...this.data.keys()].map(parseJson);\n }\n static fromColumns(columns) {\n var _a, _b;\n const result = /* @__PURE__ */ new Map();\n for (const linker of columns.filter((l) => !!readAnnotationJson(l.spec, Annotation.IsLinkerColumn))) {\n const groups = LinkerMap.getAxesGroups(getNormalizedAxesList(linker.spec.axesSpec));\n if (groups.length !== 2) {\n continue;\n }\n const [left, right] = groups;\n const leftKeyVariants = LinkerMap.getAxesRoots(left).map((axis) => {\n const axes = getArrayFromAxisTree(getAxesTree(axis));\n const key = canonicalizeJson(axes.map(getAxisId));\n return [key, axes];\n });\n const rightKeyVariants = LinkerMap.getAxesRoots(right).map((axis) => {\n const axes = getArrayFromAxisTree(getAxesTree(axis));\n const key = canonicalizeJson(axes.map(getAxisId));\n return [key, axes];\n });\n for (const [keyLeft, spec] of leftKeyVariants) {\n if (!result.has(keyLeft)) {\n result.set(keyLeft, { keyAxesSpec: spec, linkWith: /* @__PURE__ */ new Map() });\n }\n }\n for (const [keyRight, spec] of rightKeyVariants) {\n if (!result.has(keyRight)) {\n result.set(keyRight, { keyAxesSpec: spec, linkWith: /* @__PURE__ */ new Map() });\n }\n }\n for (const [keyLeft] of leftKeyVariants) {\n for (const [keyRight] of rightKeyVariants) {\n (_a = result.get(keyLeft)) == null ? void 0 : _a.linkWith.set(keyRight, linker);\n (_b = result.get(keyRight)) == null ? 
void 0 : _b.linkWith.set(keyLeft, linker);\n }\n }\n }\n return new this(result);\n }\n /** Get all available nodes of linker graphs if start from sourceAxesKeys */\n searchAvailableAxesKeys(sourceAxesKeys) {\n const startKeys = new Set(sourceAxesKeys);\n const allAvailableKeys = /* @__PURE__ */ new Set();\n let nextKeys = sourceAxesKeys;\n while (nextKeys.length) {\n const next = [];\n for (const key of nextKeys) {\n const node = this.data.get(key);\n if (!node)\n continue;\n for (const availableKey of node.linkWith.keys()) {\n if (!allAvailableKeys.has(availableKey) && !startKeys.has(availableKey)) {\n next.push(availableKey);\n allAvailableKeys.add(availableKey);\n }\n }\n }\n nextKeys = next;\n }\n return allAvailableKeys;\n }\n /** Get all linker columns that are necessary to reach endKey from startKey */\n searchLinkerPath(startKey, endKey) {\n const previous = {};\n let nextIds = /* @__PURE__ */ new Set([startKey]);\n const visited = /* @__PURE__ */ new Set([startKey]);\n while (nextIds.size) {\n const next = /* @__PURE__ */ new Set();\n for (const nextId of nextIds) {\n const node = this.data.get(nextId);\n if (!node)\n continue;\n for (const availableId of node.linkWith.keys()) {\n previous[availableId] = nextId;\n if (availableId === endKey) {\n const ids = [];\n let current = endKey;\n while (previous[current] !== startKey) {\n ids.push(current);\n current = previous[current];\n }\n ids.push(current);\n return ids.map((id) => this.data.get(id).linkWith.get(previous[id]));\n } else if (!visited.has(availableId)) {\n next.add(availableId);\n visited.add(availableId);\n }\n }\n }\n nextIds = next;\n }\n return [];\n }\n getLinkerColumnsForAxes({ from: sourceAxes, to: targetAxes, throwWhenNoLinkExists = true }) {\n const startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n return Array.from(new Map(LinkerMap.getAxesRoots(targetAxes).map(LinkerMap.getLinkerKeyFromAxisSpec).flatMap((targetKey) => {\n const linkers = startKeys.map((startKey) => this.searchLinkerPath(startKey, targetKey)).reduce((shortestPath, path) => shortestPath.length && shortestPath.length < path.length || !path.length ? shortestPath : path, []).map((linker) => [linker.columnId, linker]);\n if (!linkers.length && throwWhenNoLinkExists) {\n throw Error(`Unable to find linker column for ${targetKey}`);\n }\n return linkers;\n })).values());\n }\n /** Get list of axisSpecs from keys of linker columns map */\n getAxesListFromKeysList(keys) {\n return Array.from(new Map(keys.flatMap((key) => {\n var _a;\n return ((_a = this.data.get(key)) == null ? void 0 : _a.keyAxesSpec) ?? 
[];\n }).map((axis) => [canonicalizeJson(getAxisId(axis)), axis])).values());\n }\n /** Get axes of target axes that are impossible to be linked to source axes with current linker map */\n getNonLinkableAxes(sourceAxes, targetAxes) {\n const startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n const targetKeys = targetAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n const axes = Array.from(new Map(targetAxes.filter((_targetAxis, idx) => {\n const targetKey = targetKeys[idx];\n return !startKeys.some((startKey) => this.searchLinkerPath(startKey, targetKey).length);\n }).flatMap((axis) => getArrayFromAxisTree(getAxesTree(axis)).map((axis2) => [canonicalizeJson(getAxisId(axis2)), axis2]))).values());\n return axes;\n }\n /** Get all axes that can be connected to sourceAxes by linkers */\n getReachableByLinkersAxesFromAxes(sourceAxes) {\n const startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n const availableKeys = this.searchAvailableAxesKeys(startKeys);\n return this.getAxesListFromKeysList([...availableKeys]);\n }\n static getLinkerKeyFromAxisSpec(axis) {\n return canonicalizeJson(getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n }\n /** Split array of axes into several arrays by parents: axes of one group are parents for each other.\n There are no order inside every group. */\n static getAxesGroups(axesSpec) {\n switch (axesSpec.length) {\n case 0:\n return [];\n case 1:\n return [[axesSpec[0]]];\n }\n const axisKeys = axesSpec.map((spec) => canonicalizeJson(getAxisId(spec)));\n const axisParentsIdxs = axesSpec.map((spec) => new Set(spec.parentAxesSpec.map((spec2) => canonicalizeJson(getAxisId(spec2))).map((el) => {\n const idx = axisKeys.indexOf(el);\n if (idx === -1) {\n throw new Error(`malformed axesSpec: ${JSON.stringify(axesSpec)}, unable to locate parent ${el}`);\n }\n return idx;\n })));\n const allIdxs = [...axesSpec.keys()];\n const groups = [];\n const usedIdxs = /* @__PURE__ */ new Set();\n let nextFreeEl = allIdxs.find((idx) => !usedIdxs.has(idx));\n while (nextFreeEl !== void 0) {\n const currentGroup = [nextFreeEl];\n usedIdxs.add(nextFreeEl);\n let nextElsOfCurrentGroup = [nextFreeEl];\n while (nextElsOfCurrentGroup.length) {\n const next = /* @__PURE__ */ new Set();\n for (const groupIdx of nextElsOfCurrentGroup) {\n const groupElementParents = axisParentsIdxs[groupIdx];\n allIdxs.forEach((idx) => {\n if (idx === groupIdx || usedIdxs.has(idx)) {\n return;\n }\n const parents = axisParentsIdxs[idx];\n if (parents.has(groupIdx) || groupElementParents.has(idx)) {\n currentGroup.push(idx);\n next.add(idx);\n usedIdxs.add(idx);\n }\n });\n }\n nextElsOfCurrentGroup = [...next];\n }\n groups.push([...currentGroup]);\n nextFreeEl = allIdxs.find((idx) => !usedIdxs.has(idx));\n }\n return groups.map((group) => group.map((idx) => axesSpec[idx]));\n }\n /** Get all axes that are not parents of any other axis */\n static getAxesRoots(axes) {\n const parentsSet = new Set(axes.flatMap((axis) => axis.parentAxesSpec).map((spec) => canonicalizeJson(getAxisId(spec))));\n return axes.filter((axis) => !parentsSet.has(canonicalizeJson(getAxisId(axis))));\n }\n }\n const PlIdLength = 24;\n z.string().length(PlIdLength).regex(/[ABCDEFGHIJKLMNOPQRSTUVWXYZ234567]/).brand(\"PlId\");\n z.object({\n __isRef: z.literal(true).describe(\"Crucial marker for the block dependency tree reconstruction\"),\n blockId: z.string().describe(\"Upstream block id\"),\n name: z.string().describe(\"Name of the output provided to the upstream block's output context\"),\n 
requireEnrichments: z.literal(true).optional().describe(\"True if current block that stores this reference in its args, may need enrichments for the references value originating from the blocks in between current and referenced block\")\n }).describe(\"Universal reference type, allowing to set block connections. It is crucial that {@link __isRef} is present and equal to true, internal logic relies on this marker to build block dependency trees.\").readonly();\n function isPlRef(value) {\n return typeof value === \"object\" && value !== null && \"__isRef\" in value && value.__isRef === true && \"blockId\" in value && \"name\" in value;\n }\n function withEnrichments(ref, requireEnrichments = true) {\n if (requireEnrichments)\n return {\n ...ref,\n requireEnrichments: true\n };\n else {\n const { requireEnrichments: _, ...rest } = ref;\n return rest;\n }\n }\n function mapValueInVOE(voe, cb) {\n return voe.ok ? { ok: true, value: cb(voe.value) } : voe;\n }\n class FutureRef {\n constructor(handle, postProcess = (v) => v) {\n __publicField(this, \"handle\");\n __publicField(this, \"postProcess\");\n __publicField(this, \"isResolved\", false);\n __publicField(this, \"resolvedValue\");\n this.handle = handle;\n this.postProcess = postProcess;\n registerFutureAwait(handle, (value) => {\n this.resolvedValue = postProcess(value);\n this.isResolved = true;\n });\n }\n map(mapping) {\n return new FutureRef(this.handle, (v) => mapping(this.postProcess(v)));\n }\n mapDefined(mapping) {\n return new FutureRef(this.handle, (v) => {\n const vv = this.postProcess(v);\n return vv ? mapping(vv) : void 0;\n });\n }\n toJSON() {\n return this.isResolved ? this.resolvedValue : { __awaited_futures__: [this.handle] };\n }\n }\n function ifDef(value, cb) {\n return value === void 0 ? void 0 : cb(value);\n }\n class TreeNodeAccessor {\n constructor(handle, resolvePath) {\n __publicField(this, \"handle\");\n __publicField(this, \"resolvePath\");\n this.handle = handle;\n this.resolvePath = resolvePath;\n }\n resolve(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: \"Input\",\n ...typeof s === \"string\" ? { field: s } : s\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveOutput(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: \"Output\",\n ...typeof s === \"string\" ? { field: s } : s\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveInput(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: \"Input\",\n ...typeof s === \"string\" ? { field: s } : s\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveAny(...steps) {\n return this.resolveWithCommon({}, ...steps);\n }\n resolveWithCommon(commonOptions, ...steps) {\n const resolvePath = [\n ...this.resolvePath,\n ...steps.map((step) => typeof step === \"string\" ? 
step : step.field)\n ];\n return ifDef(getCfgRenderCtx().resolveWithCommon(this.handle, commonOptions, ...steps), (accessor) => new TreeNodeAccessor(accessor, resolvePath));\n }\n get resourceType() {\n return getCfgRenderCtx().getResourceType(this.handle);\n }\n getInputsLocked() {\n return getCfgRenderCtx().getInputsLocked(this.handle);\n }\n getOutputsLocked() {\n return getCfgRenderCtx().getOutputsLocked(this.handle);\n }\n getIsReadyOrError() {\n return getCfgRenderCtx().getIsReadyOrError(this.handle);\n }\n getIsFinal() {\n return getCfgRenderCtx().getIsFinal(this.handle);\n }\n getError() {\n const resolvePath = [...this.resolvePath, \"error\"];\n return ifDef(getCfgRenderCtx().getError(this.handle), (accsessor) => new TreeNodeAccessor(accsessor, resolvePath));\n }\n listInputFields() {\n return getCfgRenderCtx().listInputFields(this.handle);\n }\n listOutputFields() {\n return getCfgRenderCtx().listOutputFields(this.handle);\n }\n listDynamicFields() {\n return getCfgRenderCtx().listDynamicFields(this.handle);\n }\n getKeyValueBase64(key) {\n return getCfgRenderCtx().getKeyValueBase64(this.handle, key);\n }\n getKeyValueAsString(key) {\n return getCfgRenderCtx().getKeyValueAsString(this.handle, key);\n }\n getKeyValueAsJson(key) {\n const content = this.getKeyValueAsString(key);\n if (content == void 0)\n throw new Error(\"Resource has no content.\");\n return JSON.parse(content);\n }\n getDataBase64() {\n return getCfgRenderCtx().getDataBase64(this.handle);\n }\n getDataAsString() {\n return getCfgRenderCtx().getDataAsString(this.handle);\n }\n getDataAsJson() {\n const content = this.getDataAsString();\n if (content == void 0)\n throw new Error(\"Resource has no content.\");\n return JSON.parse(content);\n }\n /**\n *\n */\n getPColumns(errorOnUnknownField = false, prefix = \"\") {\n const result = this.parsePObjectCollection(errorOnUnknownField, prefix);\n if (result === void 0)\n return void 0;\n const pf = Object.entries(result).map(([, obj]) => {\n if (!isPColumn(obj))\n throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n });\n return pf;\n }\n /**\n *\n */\n parsePObjectCollection(errorOnUnknownField = false, prefix = \"\") {\n const pObjects = getCfgRenderCtx().parsePObjectCollection(this.handle, errorOnUnknownField, prefix, ...this.resolvePath);\n if (pObjects === void 0)\n return void 0;\n const result = {};\n for (const [key, value] of Object.entries(pObjects)) {\n const resolvePath = [...this.resolvePath, key];\n result[key] = mapPObjectData(value, (c) => new TreeNodeAccessor(c, resolvePath));\n }\n return result;\n }\n getFileContentAsBase64(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsBase64(this.handle, range));\n }\n getFileContentAsString(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsString(this.handle, range));\n }\n getFileContentAsJson(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsString(this.handle, range)).mapDefined((v) => JSON.parse(v));\n }\n /**\n * @deprecated use getFileContentAsBase64\n */\n getBlobContentAsBase64() {\n return this.getFileContentAsBase64();\n }\n /**\n * @deprecated use getFileContentAsString\n */\n getBlobContentAsString() {\n return this.getFileContentAsString();\n }\n /**\n * @returns downloaded file handle\n */\n getFileHandle() {\n return new FutureRef(getCfgRenderCtx().getDownloadedBlobContentHandle(this.handle));\n }\n /**\n * @deprecated use getFileHandle\n */\n getDownloadedBlobHandle() {\n return this.getFileHandle();\n }\n /**\n * @returns 
downloaded file handle\n */\n getRemoteFileHandle() {\n return new FutureRef(getCfgRenderCtx().getOnDemandBlobContentHandle(this.handle));\n }\n /**\n * @deprecated use getRemoteFileHandle\n */\n getOnDemandBlobHandle() {\n return this.getRemoteFileHandle();\n }\n /**\n * @returns the url to the extracted folder\n */\n extractArchiveAndGetURL(format) {\n return new FutureRef(getCfgRenderCtx().extractArchiveAndGetURL(this.handle, format));\n }\n getImportProgress() {\n return new FutureRef(getCfgRenderCtx().getImportProgress(this.handle));\n }\n getLastLogs(nLines) {\n return new FutureRef(getCfgRenderCtx().getLastLogs(this.handle, nLines));\n }\n getProgressLog(patternToSearch) {\n return new FutureRef(getCfgRenderCtx().getProgressLog(this.handle, patternToSearch));\n }\n getProgressLogWithInfo(patternToSearch) {\n return new FutureRef(getCfgRenderCtx().getProgressLogWithInfo(this.handle, patternToSearch));\n }\n getLogHandle() {\n return new FutureRef(getCfgRenderCtx().getLogHandle(this.handle));\n }\n allFieldsResolved(fieldType = \"Input\") {\n switch (fieldType) {\n case \"Input\":\n return this.getInputsLocked() && this.listInputFields().every((field) => this.resolve({ field, assertFieldType: \"Input\" }) !== void 0);\n case \"Output\":\n return this.getOutputsLocked() && this.listOutputFields().every((field) => this.resolve({ field, assertFieldType: \"Output\" }) !== void 0);\n }\n }\n mapFields(_mapping, _ops) {\n const { fieldType, requireLocked, skipUnresolved } = {\n fieldType: \"Input\",\n requireLocked: true,\n skipUnresolved: false,\n ..._ops\n };\n const mapping = _mapping;\n if (requireLocked) {\n if (fieldType === \"Input\" && !this.getInputsLocked())\n return void 0;\n if (fieldType === \"Output\" && !this.getOutputsLocked())\n return void 0;\n }\n const fieldList = fieldType === \"Input\" ? this.listInputFields() : fieldType === \"Output\" ? this.listOutputFields() : this.listDynamicFields();\n let fieldEntries = fieldList.map((field) => [field, this.resolve({ field, assertFieldType: fieldType })]);\n if (skipUnresolved)\n fieldEntries = fieldEntries.filter((e) => e[1] !== void 0);\n return fieldEntries.map(([name, value]) => mapping(name, value));\n }\n }\n const StagingAccessorName = \"staging\";\n const MainAccessorName = \"main\";\n function filterDataInfoEntries(dataInfoEntries, axisFilters) {\n const sortedFilters = [...axisFilters].sort((a, b) => b[0] - a[0]);\n const { type } = dataInfoEntries;\n switch (type) {\n case \"Json\": {\n const { keyLength } = dataInfoEntries;\n for (const [axisIdx] of axisFilters)\n if (axisIdx >= keyLength)\n throw new Error(`Can't filter on non-data axis ${axisIdx}. Must be >= ${keyLength}`);\n break;\n }\n case \"JsonPartitioned\":\n case \"BinaryPartitioned\":\n case \"ParquetPartitioned\": {\n const { partitionKeyLength } = dataInfoEntries;\n for (const [axisIdx] of axisFilters)\n if (axisIdx >= partitionKeyLength)\n throw new Error(`Can't filter on non-partitioned axis ${axisIdx}. 
Must be >= ${partitionKeyLength}`);\n break;\n }\n default:\n throw new Error(`Unsupported data info type: ${type}`);\n }\n const keyMatchesFilters = (key) => {\n for (const [axisIdx, axisValue] of sortedFilters)\n if (key[axisIdx] !== axisValue)\n return false;\n return true;\n };\n const removeFilteredAxes = (key) => {\n const newKey = [...key];\n for (const [axisIdx] of sortedFilters)\n newKey.splice(axisIdx, 1);\n return newKey;\n };\n switch (dataInfoEntries.type) {\n case \"Json\":\n return {\n type: \"Json\",\n keyLength: dataInfoEntries.keyLength - axisFilters.length,\n data: dataInfoEntries.data.filter((entry) => keyMatchesFilters(entry.key)).map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value\n }))\n };\n case \"JsonPartitioned\":\n return {\n type: \"JsonPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts.filter((entry) => keyMatchesFilters(entry.key)).map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value\n }))\n };\n case \"BinaryPartitioned\":\n return {\n type: \"BinaryPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts.filter((entry) => keyMatchesFilters(entry.key)).map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value\n }))\n };\n case \"ParquetPartitioned\":\n return {\n type: \"ParquetPartitioned\",\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts.filter((entry) => keyMatchesFilters(entry.key)).map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value\n }))\n };\n }\n }\n const TraceEntry = z.object({\n type: z.string(),\n importance: z.number().optional(),\n id: z.string().optional(),\n label: z.string()\n });\n const Trace = z.array(TraceEntry);\n const DistancePenalty = 1e-3;\n const LabelType = \"__LABEL__\";\n const LabelTypeFull = \"__LABEL__@1\";\n function deriveLabels(values, specExtractor, ops = {}) {\n const importances = /* @__PURE__ */ new Map();\n const forceTraceElements = ops.forceTraceElements !== void 0 && ops.forceTraceElements.length > 0 ? new Set(ops.forceTraceElements) : void 0;\n const numberOfRecordsWithType = /* @__PURE__ */ new Map();\n const enrichedRecords = values.map((value) => {\n const extractorResult = specExtractor(value);\n let spec;\n let prefixTrace;\n let suffixTrace;\n if (\"spec\" in extractorResult && typeof extractorResult.spec === \"object\") {\n spec = extractorResult.spec;\n prefixTrace = extractorResult.prefixTrace;\n suffixTrace = extractorResult.suffixTrace;\n } else {\n spec = extractorResult;\n }\n const label = readAnnotation(spec, Annotation.Label);\n const traceStr = readAnnotation(spec, Annotation.Trace);\n const baseTrace = (traceStr ? Trace.safeParse(parseJson(traceStr)).data : void 0) ?? [];\n const trace = [\n ...prefixTrace ?? [],\n ...baseTrace,\n ...suffixTrace ?? []\n ];\n if (label !== void 0) {\n const labelEntry = { label, type: LabelType, importance: -2 };\n if (ops.addLabelAsSuffix)\n trace.push(labelEntry);\n else\n trace.splice(0, 0, labelEntry);\n }\n const fullTrace = [];\n const occurrences = /* @__PURE__ */ new Map();\n for (let i = trace.length - 1; i >= 0; --i) {\n const { type: typeName } = trace[i];\n const importance = trace[i].importance ?? 0;\n const occurrenceIndex = (occurrences.get(typeName) ?? 
0) + 1;\n occurrences.set(typeName, occurrenceIndex);\n const fullType = `${typeName}@${occurrenceIndex}`;\n numberOfRecordsWithType.set(fullType, (numberOfRecordsWithType.get(fullType) ?? 0) + 1);\n importances.set(fullType, Math.max(importances.get(fullType) ?? Number.NEGATIVE_INFINITY, importance - (trace.length - i) * DistancePenalty));\n fullTrace.push({ ...trace[i], fullType, occurrenceIndex });\n }\n fullTrace.reverse();\n return {\n value,\n spec,\n label,\n fullTrace\n };\n });\n const mainTypes = [];\n const secondaryTypes = [];\n const allTypeRecords = [...importances];\n allTypeRecords.sort(([, i1], [, i2]) => i2 - i1);\n for (const [typeName] of allTypeRecords) {\n if (typeName.endsWith(\"@1\") || numberOfRecordsWithType.get(typeName) === values.length)\n mainTypes.push(typeName);\n else\n secondaryTypes.push(typeName);\n }\n const calculate = (includedTypes2, force = false) => {\n const result = [];\n for (let i = 0; i < enrichedRecords.length; i++) {\n const r = enrichedRecords[i];\n const includedTrace = r.fullTrace.filter((fm) => includedTypes2.has(fm.fullType) || forceTraceElements && forceTraceElements.has(fm.type));\n if (includedTrace.length === 0) {\n if (force)\n result.push({\n label: \"Unlabeled\",\n value: r.value\n });\n else\n return void 0;\n }\n const labelSet = includedTrace.map((fm) => fm.label);\n const sep = ops.separator ?? \" / \";\n result.push({\n label: labelSet.join(sep),\n value: r.value\n });\n }\n return result;\n };\n if (mainTypes.length === 0) {\n if (secondaryTypes.length !== 0)\n throw new Error(\"Non-empty secondary types list while main types list is empty.\");\n return calculate(new Set(LabelTypeFull), true);\n }\n let includedTypes = 0;\n let additionalType = -1;\n while (includedTypes < mainTypes.length) {\n const currentSet = /* @__PURE__ */ new Set();\n if (ops.includeNativeLabel)\n currentSet.add(LabelTypeFull);\n for (let i = 0; i < includedTypes; ++i)\n currentSet.add(mainTypes[i]);\n if (additionalType >= 0)\n currentSet.add(mainTypes[additionalType]);\n const candidateResult = calculate(currentSet);\n if (candidateResult !== void 0 && new Set(candidateResult.map((c) => c.label)).size === values.length)\n return candidateResult;\n additionalType++;\n if (additionalType >= mainTypes.length) {\n includedTypes++;\n additionalType = includedTypes;\n }\n }\n return calculate(/* @__PURE__ */ new Set([...mainTypes, ...secondaryTypes]), true);\n }\n const PCD_PREFIX = \"PColumnData/\";\n const RT_RESOURCE_MAP = PCD_PREFIX + \"ResourceMap\";\n const RT_RESOURCE_MAP_PARTITIONED = PCD_PREFIX + \"Partitioned/ResourceMap\";\n const RT_JSON_PARTITIONED = PCD_PREFIX + \"JsonPartitioned\";\n const RT_BINARY_PARTITIONED = PCD_PREFIX + \"BinaryPartitioned\";\n const RT_PARQUET_PARTITIONED = PCD_PREFIX + \"ParquetPartitioned\";\n const PCD_SUP_PREFIX = PCD_PREFIX + \"Partitioned/\";\n const RT_JSON_SUPER_PARTITIONED = PCD_SUP_PREFIX + \"JsonPartitioned\";\n const RT_BINARY_SUPER_PARTITIONED = PCD_SUP_PREFIX + \"BinaryPartitioned\";\n const RT_PARQUET_SUPER_PARTITIONED = PCD_SUP_PREFIX + \"ParquetPartitioned\";\n const removeIndexSuffix = (keyStr) => {\n if (keyStr.endsWith(\".index\")) {\n return { baseKey: keyStr.substring(0, keyStr.length - 6), type: \"index\" };\n } else if (keyStr.endsWith(\".values\")) {\n return { baseKey: keyStr.substring(0, keyStr.length - 7), type: \"values\" };\n } else {\n throw new Error(`key must ends on .index/.values for binary p-column, got: ${keyStr}`);\n }\n };\n function getPartitionKeysList(acc) {\n if (!acc)\n 
return void 0;\n const rt = acc.resourceType.name;\n const meta = acc.getDataAsJson();\n const data = [];\n let keyLength = 0;\n switch (rt) {\n case RT_RESOURCE_MAP:\n keyLength = meta[\"keyLength\"];\n break;\n case RT_RESOURCE_MAP_PARTITIONED:\n keyLength = meta[\"partitionKeyLength\"] + meta[\"keyLength\"];\n break;\n case RT_JSON_PARTITIONED:\n case RT_BINARY_PARTITIONED:\n case RT_PARQUET_PARTITIONED:\n keyLength = meta[\"partitionKeyLength\"];\n break;\n case RT_BINARY_SUPER_PARTITIONED:\n case RT_JSON_SUPER_PARTITIONED:\n case RT_PARQUET_SUPER_PARTITIONED:\n keyLength = meta[\"superPartitionKeyLength\"] + meta[\"partitionKeyLength\"];\n break;\n }\n switch (rt) {\n case RT_RESOURCE_MAP:\n case RT_JSON_PARTITIONED:\n case RT_BINARY_PARTITIONED:\n case RT_PARQUET_PARTITIONED:\n for (let keyStr of acc.listInputFields()) {\n if (rt === RT_BINARY_PARTITIONED) {\n keyStr = removeIndexSuffix(keyStr).baseKey;\n }\n const key = [...JSON.parse(keyStr)];\n data.push(key);\n }\n break;\n case RT_RESOURCE_MAP_PARTITIONED:\n case RT_BINARY_SUPER_PARTITIONED:\n case RT_JSON_SUPER_PARTITIONED:\n case RT_PARQUET_SUPER_PARTITIONED:\n for (const supKeyStr of acc.listInputFields()) {\n const keyPrefix = [...JSON.parse(supKeyStr)];\n const value = acc.resolve({ field: supKeyStr, assertFieldType: \"Input\" });\n if (value !== void 0) {\n for (let keyStr of value.listInputFields()) {\n if (rt === RT_BINARY_SUPER_PARTITIONED) {\n keyStr = removeIndexSuffix(keyStr).baseKey;\n }\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n data.push(key);\n }\n }\n }\n break;\n }\n return { data, keyLength };\n }\n function getUniquePartitionKeysForDataEntries(list) {\n if (list.type !== \"JsonPartitioned\" && list.type !== \"BinaryPartitioned\" && list.type !== \"ParquetPartitioned\")\n throw new Error(`Splitting requires Partitioned DataInfoEntries, got ${list.type}`);\n const { parts, partitionKeyLength } = list;\n const result = [];\n for (let i = 0; i < partitionKeyLength; ++i) {\n result.push(/* @__PURE__ */ new Set());\n }\n for (const part of parts) {\n const key = part.key;\n if (key.length !== partitionKeyLength) {\n throw new Error(`Key length (${key.length}) does not match partition length (${partitionKeyLength}) for key: ${JSON.stringify(key)}`);\n }\n for (let i = 0; i < partitionKeyLength; ++i) {\n result[i].add(key[i]);\n }\n }\n return result.map((s) => Array.from(s.values()));\n }\n function getUniquePartitionKeys(acc) {\n if (acc === void 0)\n return void 0;\n if (isDataInfoEntries(acc))\n return getUniquePartitionKeysForDataEntries(acc);\n const list = getPartitionKeysList(acc);\n if (!list)\n return void 0;\n const { data, keyLength } = list;\n const result = [];\n for (let i = 0; i < keyLength; ++i) {\n result.push(/* @__PURE__ */ new Set());\n }\n for (const l of data) {\n if (l.length !== keyLength) {\n throw new Error(\"key length does not match partition length\");\n }\n for (let i = 0; i < keyLength; ++i) {\n result[i].add(l[i]);\n }\n }\n return result.map((s) => Array.from(s.values()));\n }\n function parsePColumnData(acc, keyPrefix = []) {\n if (acc === void 0)\n return void 0;\n if (!acc.getIsReadyOrError())\n return void 0;\n const resourceType = acc.resourceType.name;\n const meta = acc.getDataAsJson();\n if (keyPrefix.length > 0 && (resourceType === RT_JSON_SUPER_PARTITIONED || resourceType === RT_BINARY_SUPER_PARTITIONED || resourceType === RT_PARQUET_SUPER_PARTITIONED)) {\n throw new Error(`Unexpected nested super-partitioned resource: ${resourceType}`);\n }\n switch 
(resourceType) {\n case RT_RESOURCE_MAP:\n case RT_RESOURCE_MAP_PARTITIONED:\n throw new Error(`Only data columns are supported, got: ${resourceType}`);\n case RT_JSON_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n const parts = [];\n for (const keyStr of acc.listInputFields()) {\n const value = acc.resolve({ field: keyStr, assertFieldType: \"Input\" });\n if (value === void 0)\n return void 0;\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n parts.push({ key, value });\n }\n return {\n type: \"JsonPartitioned\",\n partitionKeyLength: meta.partitionKeyLength,\n parts\n };\n }\n case RT_BINARY_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n const parts = [];\n const baseKeys = /* @__PURE__ */ new Map();\n for (const keyStr of acc.listInputFields()) {\n const suffix = removeIndexSuffix(keyStr);\n const value = acc.resolve({ field: keyStr, assertFieldType: \"Input\" });\n if (value === void 0)\n return void 0;\n let entry = baseKeys.get(suffix.baseKey);\n if (!entry) {\n entry = {};\n baseKeys.set(suffix.baseKey, entry);\n }\n if (suffix.type === \"index\") {\n entry.index = value;\n } else {\n entry.values = value;\n }\n }\n for (const [baseKeyStr, entry] of baseKeys.entries()) {\n if (!entry.index || !entry.values)\n return void 0;\n const key = [...keyPrefix, ...JSON.parse(baseKeyStr)];\n parts.push({\n key,\n value: {\n index: entry.index,\n values: entry.values\n }\n });\n }\n return {\n type: \"BinaryPartitioned\",\n partitionKeyLength: meta.partitionKeyLength,\n parts\n };\n }\n case RT_PARQUET_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n const parts = [];\n for (const keyStr of acc.listInputFields()) {\n const value = acc.resolve({ field: keyStr, assertFieldType: \"Input\" });\n if (value === void 0)\n return void 0;\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n parts.push({ key, value });\n }\n return {\n type: \"ParquetPartitioned\",\n partitionKeyLength: meta.partitionKeyLength,\n parts\n };\n }\n case RT_JSON_SUPER_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.superPartitionKeyLength) !== \"number\" || typeof (meta == null ? 
void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: \"Input\" });\n if (superPartition === void 0)\n return void 0;\n if (superPartition.resourceType.name !== RT_JSON_PARTITIONED) {\n throw new Error(`Expected ${RT_JSON_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === void 0)\n return void 0;\n if (innerResult.type !== \"JsonPartitioned\")\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: \"JsonPartitioned\",\n partitionKeyLength: totalKeyLength,\n parts\n };\n }\n case RT_BINARY_SUPER_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.superPartitionKeyLength) !== \"number\" || typeof (meta == null ? void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: \"Input\" });\n if (superPartition === void 0)\n return void 0;\n if (superPartition.resourceType.name !== RT_BINARY_PARTITIONED) {\n throw new Error(`Expected ${RT_BINARY_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === void 0)\n return void 0;\n if (innerResult.type !== \"BinaryPartitioned\")\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: \"BinaryPartitioned\",\n partitionKeyLength: totalKeyLength,\n parts\n };\n }\n case RT_PARQUET_SUPER_PARTITIONED: {\n if (typeof (meta == null ? void 0 : meta.superPartitionKeyLength) !== \"number\" || typeof (meta == null ? 
void 0 : meta.partitionKeyLength) !== \"number\") {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: \"Input\" });\n if (superPartition === void 0)\n return void 0;\n if (superPartition.resourceType.name !== RT_PARQUET_PARTITIONED) {\n throw new Error(`Expected ${RT_PARQUET_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === void 0)\n return void 0;\n if (innerResult.type !== \"ParquetPartitioned\")\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: \"ParquetPartitioned\",\n partitionKeyLength: totalKeyLength,\n parts\n };\n }\n default:\n throw new Error(`Unknown resource type: ${resourceType}`);\n }\n }\n function convertOrParsePColumnData(acc) {\n if (acc === void 0)\n return void 0;\n if (isDataInfoEntries(acc))\n return acc;\n if (isDataInfo(acc))\n return dataInfoToEntries(acc);\n if (acc instanceof TreeNodeAccessor)\n return parsePColumnData(acc);\n throw new Error(`Unexpected input type: ${typeof acc}`);\n }\n function isPColumnValues(value) {\n if (!Array.isArray(value))\n return false;\n if (value.length === 0)\n return true;\n const first = value[0];\n return typeof first === \"object\" && first !== null && \"key\" in first && \"val\" in first;\n }\n class ArrayColumnProvider {\n constructor(columns) {\n __publicField(this, \"columns\");\n this.columns = columns;\n }\n selectColumns(selectors) {\n const predicate = typeof selectors === \"function\" ? 
selectors : selectorsToPredicate(selectors);\n return this.columns.filter((column) => predicate(column.spec));\n }\n }\n function splitFiltersToTrace(splitFilters) {\n if (!splitFilters)\n return void 0;\n return splitFilters.map((filter) => ({\n type: `split:${canonicalizeAxisId(filter.axisId)}`,\n label: filter.label,\n importance: 1e6\n // High importance for split filters in labels\n }));\n }\n function splitFiltersToAxisFilter(splitFilters) {\n if (!splitFilters)\n return void 0;\n return splitFilters.map((filter) => [filter.axisIdx, filter.value]);\n }\n function fallbackIdDeriver(originalId, axisFilters) {\n if (!axisFilters || axisFilters.length === 0)\n return originalId;\n const filtersToCanonicalize = [...axisFilters].sort((a, b) => a[0] - b[0]);\n return canonicalize({ id: originalId, axisFilters: filtersToCanonicalize });\n }\n function hasAnchors(selector) {\n if (!selector || typeof selector !== \"object\")\n return false;\n const potentialAnchored = selector;\n const domainHasAnchors = potentialAnchored[\"domain\"] && typeof potentialAnchored[\"domain\"] === \"object\" && Object.values(potentialAnchored[\"domain\"]).some((v) => typeof v === \"object\" && v !== null && \"anchor\" in v);\n const axesHaveAnchors = potentialAnchored[\"axes\"] && Array.isArray(potentialAnchored[\"axes\"]) && potentialAnchored[\"axes\"].some((a) => typeof a === \"object\" && a !== null && \"anchor\" in a);\n return !!potentialAnchored[\"domainAnchor\"] || domainHasAnchors || axesHaveAnchors;\n }\n function getSplitAxisIndices(selector) {\n if (typeof selector !== \"object\" || !(\"axes\" in selector) || selector.axes === void 0) {\n return [];\n }\n const splitIndices = selector.axes.map((axis, index) => typeof axis === \"object\" && \"split\" in axis && axis.split === true ? index : -1).filter((index) => index !== -1);\n if (splitIndices.length > 0 && selector.partialAxesMatch !== void 0) {\n throw new Error(\"Axis splitting is not supported when `partialAxesMatch` is defined.\");\n }\n splitIndices.sort((a, b) => a - b);\n return splitIndices;\n }\n class PColumnCollection {\n constructor() {\n __publicField(this, \"defaultProviderStore\", []);\n __publicField(this, \"providers\", [new ArrayColumnProvider(this.defaultProviderStore)]);\n __publicField(this, \"axisLabelProviders\", []);\n }\n addColumnProvider(provider) {\n this.providers.push(provider);\n return this;\n }\n addAxisLabelProvider(provider) {\n this.axisLabelProviders.push(provider);\n return this;\n }\n addColumns(columns) {\n this.defaultProviderStore.push(...columns);\n return this;\n }\n addColumn(column) {\n this.defaultProviderStore.push(column);\n return this;\n }\n /** Fetches labels for a given axis from the registered providers */\n findLabels(axis) {\n for (const provider of this.axisLabelProviders) {\n const labels = provider.findLabels(axis);\n if (labels)\n return labels;\n }\n return void 0;\n }\n getUniversalEntries(predicateOrSelectors, opts) {\n const { anchorCtx, labelOps: rawLabelOps, dontWaitAllData = false, overrideLabelAnnotation = false, exclude } = opts ?? {};\n const labelOps = {\n ...overrideLabelAnnotation && (rawLabelOps == null ? void 0 : rawLabelOps.includeNativeLabel) !== false ? { includeNativeLabel: true } : {},\n ...rawLabelOps ?? {}\n };\n let excludePredicate = () => false;\n if (exclude) {\n const excludePredicartes = (Array.isArray(exclude) ? 
exclude : [exclude]).map((selector) => {\n if (hasAnchors(selector)) {\n if (!anchorCtx)\n throw new Error(\"Anchored selectors in exclude require an AnchoredIdDeriver to be provided in options.\");\n return selectorsToPredicate(resolveAnchors(anchorCtx.anchors, selector, opts));\n } else\n return selectorsToPredicate(selector);\n });\n excludePredicate = (spec) => excludePredicartes.some((predicate) => predicate(spec));\n }\n const selectorsArray = typeof predicateOrSelectors === \"function\" ? [predicateOrSelectors] : Array.isArray(predicateOrSelectors) ? predicateOrSelectors : [predicateOrSelectors];\n const intermediateResults = [];\n const selectedNativeIds = /* @__PURE__ */ new Set();\n for (const rawSelector of selectorsArray) {\n const usesAnchors = hasAnchors(rawSelector);\n let currentSelector;\n if (usesAnchors) {\n if (!anchorCtx)\n throw new Error(\"Anchored selectors require an AnchoredIdDeriver to be provided in options.\");\n currentSelector = resolveAnchors(anchorCtx.anchors, rawSelector, opts);\n } else\n currentSelector = rawSelector;\n const selectedIds = /* @__PURE__ */ new Set();\n const selectedColumns = [];\n for (const provider of this.providers) {\n const providerColumns = provider.selectColumns(currentSelector);\n for (const col of providerColumns) {\n if (excludePredicate(col.spec))\n continue;\n if (selectedIds.has(col.id))\n throw new Error(`Duplicate column id ${col.id} in provider ${provider.constructor.name}`);\n const nativeId = deriveNativeId(col.spec);\n if (selectedNativeIds.has(nativeId))\n continue;\n selectedIds.add(col.id);\n selectedNativeIds.add(nativeId);\n selectedColumns.push(col);\n }\n }\n if (selectedColumns.length === 0)\n continue;\n const splitAxisIdxs = getSplitAxisIndices(rawSelector);\n const needsSplitting = splitAxisIdxs.length > 0;\n for (const column of selectedColumns) {\n if (!isPColumnSpec(column.spec))\n continue;\n const originalSpec = column.spec;\n if (needsSplitting) {\n if (isPColumnValues(column.data))\n throw new Error(`Splitting is not supported for PColumns with PColumnValues data format. 
Column id: ${column.id}`);\n const dataEntries = convertOrParsePColumnData(column.data);\n if (!dataEntries) {\n if (dontWaitAllData)\n continue;\n return void 0;\n }\n if (!isPartitionedDataInfoEntries(dataEntries))\n throw new Error(`Splitting requires Partitioned DataInfoEntries, but parsing resulted in ${dataEntries.type} for column ${column.id}`);\n const uniqueKeys = getUniquePartitionKeys(dataEntries);\n const maxSplitIdx = splitAxisIdxs[splitAxisIdxs.length - 1];\n if (maxSplitIdx >= dataEntries.partitionKeyLength)\n throw new Error(`Not enough partition keys (${dataEntries.partitionKeyLength}) for requested split axes (max index ${maxSplitIdx}) in column ${originalSpec.name}`);\n const axesLabels = splitAxisIdxs.map((idx) => this.findLabels(getAxisId(originalSpec.axesSpec[idx])));\n const keyCombinations = [];\n const generateCombinations = (currentCombo, sAxisIdx) => {\n if (sAxisIdx >= splitAxisIdxs.length) {\n keyCombinations.push([...currentCombo]);\n if (keyCombinations.length > 1e4)\n throw new Error(\"Too many key combinations, aborting.\");\n return;\n }\n const axisIdx = splitAxisIdxs[sAxisIdx];\n if (axisIdx >= uniqueKeys.length)\n throw new Error(`Axis index ${axisIdx} out of bounds for unique keys array (length ${uniqueKeys.length}) during split key generation for column ${column.id}`);\n const axisValues = uniqueKeys[axisIdx];\n if (!axisValues || axisValues.length === 0) {\n keyCombinations.length = 0;\n return;\n }\n for (const val of axisValues) {\n currentCombo.push(val);\n generateCombinations(currentCombo, sAxisIdx + 1);\n currentCombo.pop();\n }\n };\n generateCombinations([], 0);\n if (keyCombinations.length === 0)\n continue;\n const newAxesSpec = [...originalSpec.axesSpec];\n const splitAxisOriginalIdxs = splitAxisIdxs.map((idx) => idx);\n for (let i = splitAxisIdxs.length - 1; i >= 0; i--) {\n newAxesSpec.splice(splitAxisIdxs[i], 1);\n }\n const adjustedSpec = { ...originalSpec, axesSpec: newAxesSpec };\n for (const keyCombo of keyCombinations) {\n const splitFilters = keyCombo.map((value, sAxisIdx) => {\n const axisIdx = splitAxisOriginalIdxs[sAxisIdx];\n const axisId = getAxisId(originalSpec.axesSpec[axisIdx]);\n const axisLabelMap = axesLabels[sAxisIdx];\n const label = (axisLabelMap == null ? void 0 : axisLabelMap[value]) ?? String(value);\n return { axisIdx, axisId, value, label };\n });\n intermediateResults.push({\n type: \"split\",\n originalColumn: column,\n spec: originalSpec,\n adjustedSpec,\n dataEntries,\n axisFilters: splitFilters\n });\n }\n } else {\n intermediateResults.push({\n type: \"direct\",\n originalColumn: column,\n spec: originalSpec,\n adjustedSpec: originalSpec\n });\n }\n }\n }\n if (intermediateResults.length === 0)\n return [];\n const labeledResults = deriveLabels(intermediateResults, (entry) => ({\n spec: entry.spec,\n suffixTrace: entry.type === \"split\" ? splitFiltersToTrace(entry.axisFilters) : void 0\n }), labelOps);\n const result = [];\n for (const { value: entry, label } of labeledResults) {\n const { originalColumn, spec: originalSpec } = entry;\n const axisFilters = entry.type === \"split\" ? entry.axisFilters : void 0;\n const axisFiltersTuple = splitFiltersToAxisFilter(axisFilters);\n let finalId;\n if (anchorCtx)\n finalId = anchorCtx.deriveS(originalSpec, axisFiltersTuple);\n else\n finalId = fallbackIdDeriver(originalColumn.id, axisFiltersTuple);\n let finalSpec = { ...entry.adjustedSpec };\n if (overrideLabelAnnotation) {\n finalSpec = {\n ...finalSpec,\n annotations: {\n ...finalSpec.annotations ?? 
{},\n [Annotation.Label]: label\n }\n };\n }\n result.push({\n id: finalId,\n spec: finalSpec,\n data: () => entry.type === \"split\" ? entriesToDataInfo(filterDataInfoEntries(entry.dataEntries, axisFiltersTuple)) : entry.originalColumn.data,\n label\n });\n }\n return result;\n }\n getColumns(predicateOrSelectors, opts) {\n const entries = this.getUniversalEntries(predicateOrSelectors, {\n overrideLabelAnnotation: true,\n // default for getColumns\n ...opts ?? {}\n });\n if (!entries)\n return void 0;\n const columns = [];\n for (const entry of entries) {\n const data = entry.data();\n if (!data) {\n if (opts == null ? void 0 : opts.dontWaitAllData)\n continue;\n return void 0;\n }\n columns.push({\n id: entry.id,\n spec: entry.spec,\n data\n });\n }\n return columns;\n }\n }\n function patchInSetFilters(filters) {\n const inSetToOrEqual = (predicate) => {\n if (predicate.operator !== \"InSet\")\n return predicate;\n return {\n operator: \"Or\",\n operands: predicate.references.map((reference) => ({\n operator: \"Equal\",\n reference\n }))\n };\n };\n const mapSingleValuePredicate = (filter, cb) => {\n const operator = filter.operator;\n switch (operator) {\n case \"And\":\n return {\n ...filter,\n operands: filter.operands.map((operand) => mapSingleValuePredicate(operand, cb))\n };\n case \"Or\":\n return {\n ...filter,\n operands: filter.operands.map((operand) => mapSingleValuePredicate(operand, cb))\n };\n case \"Not\":\n return {\n ...filter,\n operand: mapSingleValuePredicate(filter.operand, cb)\n };\n default:\n return cb(filter);\n }\n };\n const mapFilter = (filter, cb) => {\n return {\n ...filter,\n predicate: mapSingleValuePredicate(filter.predicate, cb)\n };\n };\n return filters.map((filter) => mapFilter(filter, inSetToOrEqual));\n }\n function matchDomain(query, target) {\n if (query === void 0)\n return target === void 0;\n if (target === void 0)\n return true;\n for (const k in target) {\n if (query[k] !== target[k])\n return false;\n }\n return true;\n }\n function transformPColumnData(data) {\n return mapPObjectData(data, (d) => {\n if (d instanceof TreeNodeAccessor) {\n return d.handle;\n } else if (isDataInfo(d)) {\n return mapDataInfo(d, (accessor) => accessor.handle);\n } else {\n return d;\n }\n });\n }\n class ResultPool {\n constructor() {\n __publicField(this, \"ctx\", getCfgRenderCtx());\n }\n /**\n * @deprecated use getOptions()\n */\n calculateOptions(predicate) {\n return this.ctx.calculateOptions(predicate);\n }\n getOptions(predicateOrSelector, opts) {\n const predicate = typeof predicateOrSelector === \"function\" ? predicateOrSelector : selectorsToPredicate(predicateOrSelector);\n const filtered = this.getSpecs().entries.filter((s) => predicate(s.obj));\n let labelOps = {};\n let refsWithEnrichments = false;\n if (typeof opts !== \"undefined\") {\n if (typeof opts === \"function\") {\n labelOps = opts;\n } else if (typeof opts === \"object\") {\n if (\"includeNativeLabel\" in opts || \"separator\" in opts || \"addLabelAsSuffix\" in opts) {\n labelOps = opts;\n } else {\n opts = opts;\n labelOps = opts.label ?? {};\n refsWithEnrichments = opts.refsWithEnrichments ?? false;\n }\n }\n }\n if (typeof labelOps === \"object\")\n return deriveLabels(filtered, (o) => o.obj, labelOps ?? 
{}).map(({ value: { ref }, label }) => ({\n ref: withEnrichments(ref, refsWithEnrichments),\n label\n }));\n else\n return filtered.map(({ ref, obj }) => ({\n ref: withEnrichments(ref, refsWithEnrichments),\n label: labelOps(obj, ref)\n }));\n }\n resolveAnchorCtx(anchorsOrCtx) {\n if (anchorsOrCtx instanceof AnchoredIdDeriver)\n return anchorsOrCtx;\n const resolvedAnchors = {};\n for (const [key, value] of Object.entries(anchorsOrCtx)) {\n if (isPlRef(value)) {\n const resolvedSpec = this.getPColumnSpecByRef(value);\n if (!resolvedSpec)\n return void 0;\n resolvedAnchors[key] = resolvedSpec;\n } else {\n resolvedAnchors[key] = value;\n }\n }\n return new AnchoredIdDeriver(resolvedAnchors);\n }\n /**\n * Returns columns that match the provided anchors and selectors. It applies axis filters and label derivation.\n *\n * @param anchorsOrCtx - Anchor context for column selection (same as in getCanonicalOptions)\n * @param predicateOrSelectors - Predicate or selectors for filtering columns (same as in getCanonicalOptions)\n * @param opts - Optional configuration for label generation and data waiting\n * @returns A PFrameHandle for the created PFrame, or undefined if any required data is missing\n */\n getAnchoredPColumns(anchorsOrCtx, predicateOrSelectors, opts) {\n const anchorCtx = this.resolveAnchorCtx(anchorsOrCtx);\n if (!anchorCtx)\n return void 0;\n return new PColumnCollection().addColumnProvider(this).addAxisLabelProvider(this).getColumns(predicateOrSelectors, {\n ...opts,\n anchorCtx\n });\n }\n /**\n * Calculates anchored identifier options for columns matching a given predicate and returns their\n * canonicalized representations.\n *\n * This function filters column specifications from the result pool that match the provided predicate,\n * creates a standardized AnchorCtx from the provided anchors, and generates a list of label-value\n * pairs for UI components (like dropdowns).\n *\n * @param anchorsOrCtx - Either:\n * - An existing AnchorCtx instance\n * - A record mapping anchor IDs to PColumnSpec objects\n * - A record mapping anchor IDs to PlRef objects (which will be resolved to PColumnSpec)\n * @param predicateOrSelectors - Either:\n * - A predicate function that takes a PColumnSpec and returns a boolean.\n * Only specs that return true will be included.\n * - An APColumnSelector object for declarative filtering, which will be\n * resolved against the provided anchors and matched using matchPColumn.\n * - An array of APColumnSelector objects - columns matching ANY selector\n * in the array will be included (OR operation).\n * @param opts - Optional configuration for label generation:\n * - labelOps: Optional configuration for label generation:\n * - includeNativeLabel: Whether to include native column labels\n * - separator: String to use between label parts (defaults to \" / \")\n * - addLabelAsSuffix: Whether to add labels as suffix instead of prefix\n * - dontWaitAllData: Whether to skip columns that don't have all data (if not set, will return undefined,\n * if at least one column that requires splitting is missing data)\n * @returns An array of objects with `label` (display text) and `value` (anchored ID string) properties,\n * or undefined if any PlRef resolution fails.\n */\n getCanonicalOptions(anchorsOrCtx, predicateOrSelectors, opts) {\n const anchorCtx = this.resolveAnchorCtx(anchorsOrCtx);\n if (!anchorCtx)\n return void 0;\n const entries = new PColumnCollection().addColumnProvider(this).addAxisLabelProvider(this).getUniversalEntries(predicateOrSelectors, {\n 
...opts,\n anchorCtx\n });\n if (!entries)\n return void 0;\n return entries.map((item) => ({\n value: item.id,\n label: item.label\n }));\n }\n /**\n * @deprecated use getData()\n */\n getDataFromResultPool() {\n return this.getData();\n }\n getData() {\n const result = this.ctx.getDataFromResultPool();\n return {\n isComplete: result.isComplete,\n entries: result.entries.map((e) => ({\n ref: e.ref,\n obj: {\n ...e.obj,\n data: new TreeNodeAccessor(e.obj.data, [e.ref.blockId, e.ref.name])\n }\n }))\n };\n }\n /**\n * @deprecated use getDataWithErrors()\n */\n getDataWithErrorsFromResultPool() {\n return this.getDataWithErrors();\n }\n getDataWithErrors() {\n const result = this.ctx.getDataWithErrorsFromResultPool();\n return {\n isComplete: result.isComplete,\n entries: result.entries.map((e) => ({\n ref: e.ref,\n obj: {\n ...e.obj,\n data: mapValueInVOE(e.obj.data, (handle) => new TreeNodeAccessor(handle, [e.ref.blockId, e.ref.name]))\n }\n }))\n };\n }\n /**\n * @deprecated use getSpecs()\n */\n getSpecsFromResultPool() {\n return this.getSpecs();\n }\n getSpecs() {\n return this.ctx.getSpecsFromResultPool();\n }\n /**\n * @param ref a Ref\n * @returns data associated with the ref\n */\n getDataByRef(ref) {\n var _a;\n if (typeof this.ctx.getDataFromResultPoolByRef === \"undefined\")\n return (_a = this.getData().entries.find((f) => f.ref.blockId === ref.blockId && f.ref.name === ref.name)) == null ? void 0 : _a.obj;\n const data = this.ctx.getDataFromResultPoolByRef(ref.blockId, ref.name);\n if (!data)\n return void 0;\n return mapPObjectData(data, (handle) => new TreeNodeAccessor(handle, [ref.blockId, ref.name]));\n }\n /**\n * Returns data associated with the ref ensuring that it is a p-column.\n * @param ref a Ref\n * @returns p-column associated with the ref\n */\n getPColumnByRef(ref) {\n const data = this.getDataByRef(ref);\n if (!data)\n return void 0;\n return ensurePColumn(data);\n }\n /**\n * Returns spec associated with the ref ensuring that it is a p-column spec.\n * @param ref a Ref\n * @returns p-column spec associated with the ref\n */\n getPColumnSpecByRef(ref) {\n const spec = this.getSpecByRef(ref);\n if (!spec)\n return void 0;\n if (!isPColumnSpec(spec))\n throw new Error(`not a PColumn spec (kind = ${spec.kind})`);\n return spec;\n }\n /**\n * @param ref a Ref\n * @returns object spec associated with the ref\n */\n getSpecByRef(ref) {\n return this.ctx.getSpecFromResultPoolByRef(ref.blockId, ref.name);\n }\n /**\n * @param spec object specification\n * @returns array of data objects with compatible specs\n * @deprecated delete this method after Jan 1, 2025\n */\n findDataWithCompatibleSpec(spec) {\n const result = [];\n out: for (const data of this.getData().entries) {\n if (!isPColumnSpec(data.obj.spec)) {\n continue;\n }\n const oth = data.obj.spec;\n if (spec.name !== oth.name) {\n continue;\n }\n if (spec.valueType !== oth.valueType) {\n continue;\n }\n if (spec.axesSpec.length !== oth.axesSpec.length) {\n continue;\n }\n if (!matchDomain(spec.domain, oth.domain)) {\n continue;\n }\n for (let i = 0; i < spec.axesSpec.length; ++i) {\n const qAx = spec.axesSpec[i];\n const tAx = oth.axesSpec[i];\n if (qAx.name !== tAx.name) {\n continue out;\n }\n if (qAx.type !== tAx.type) {\n continue out;\n }\n if (!matchDomain(qAx.domain, tAx.domain)) {\n continue out;\n }\n }\n result.push(data.obj);\n }\n return result;\n }\n /**\n * Find labels data for a given axis id. 
It will search for a label column and return its data as a map.\n * @returns a map of axis value => label\n */\n findLabels(axis) {\n const dataPool = this.getData();\n for (const column of dataPool.entries) {\n if (!isPColumn(column.obj))\n continue;\n const spec = column.obj.spec;\n if (spec.name === PColumnName.Label && spec.axesSpec.length === 1 && spec.axesSpec[0].name === axis.name && spec.axesSpec[0].type === axis.type && matchDomain(axis.domain, spec.axesSpec[0].domain)) {\n if (column.obj.data.resourceType.name !== \"PColumnData/Json\") {\n throw Error(`Expected JSON column for labels, got: ${column.obj.data.resourceType.name}`);\n }\n const labels = Object.fromEntries(Object.entries(column.obj.data.getDataAsJson().data).map((e) => [JSON.parse(e[0])[0], e[1]]));\n return labels;\n }\n }\n return void 0;\n }\n /**\n * Selects columns based on the provided selectors, returning PColumn objects\n * with lazily loaded data.\n *\n * @param selectors - A predicate function, a single selector, or an array of selectors.\n * @returns An array of PColumn objects matching the selectors. Data is loaded on first access.\n */\n selectColumns(selectors) {\n const predicate = typeof selectors === \"function\" ? selectors : selectorsToPredicate(selectors);\n const matchedSpecs = this.getSpecs().entries.filter(({ obj: spec }) => {\n if (!isPColumnSpec(spec))\n return false;\n return predicate(spec);\n });\n return matchedSpecs.map(({ ref, obj: spec }) => {\n const pcolumnSpec = spec;\n let _cachedData = null;\n const self2 = this;\n return {\n id: canonicalize(ref),\n spec: pcolumnSpec,\n get data() {\n var _a;\n if (_cachedData !== null) {\n return _cachedData;\n }\n _cachedData = (_a = self2.getPColumnByRef(ref)) == null ? void 0 : _a.data;\n return _cachedData;\n }\n };\n });\n }\n /**\n * Find labels data for a given axis id of a p-column.\n * @returns a map of axis value => label\n */\n findLabelsForColumnAxis(column, axisIdx) {\n const labels = this.findLabels(column.axesSpec[axisIdx]);\n if (!labels)\n return void 0;\n const axisKeys = readAnnotation(column, `pl7.app/axisKeys/${axisIdx}`);\n if (axisKeys !== void 0) {\n const keys = JSON.parse(axisKeys);\n return Object.fromEntries(keys.map((key) => {\n return [key, labels[key] ?? \"Unlabelled\"];\n }));\n } else {\n return labels;\n }\n }\n }\n class RenderCtx {\n constructor() {\n __publicField(this, \"ctx\");\n __publicField(this, \"_argsCache\");\n __publicField(this, \"_uiStateCache\");\n // lazy rendering because this feature is rarely used\n __publicField(this, \"_activeArgsCache\");\n __publicField(this, \"resultPool\", new ResultPool());\n this.ctx = getCfgRenderCtx();\n }\n get args() {\n if (this._argsCache === void 0) {\n const raw = this.ctx.args;\n const value = typeof raw === \"function\" ? raw() : raw;\n this._argsCache = { v: JSON.parse(value) };\n }\n return this._argsCache.v;\n }\n get uiState() {\n if (this._uiStateCache === void 0) {\n const raw = this.ctx.uiState;\n const value = typeof raw === \"function\" ? raw() : raw;\n this._uiStateCache = { v: value ? JSON.parse(value) : {} };\n }\n return this._uiStateCache.v;\n }\n /**\n * Returns args snapshot the block was executed for (i.e. when \"Run\" button was pressed).\n * Returns undefined, if block was never executed or stopped mid-way execution, so that the result was cleared.\n * */\n get activeArgs() {\n if (this._activeArgsCache === void 0) {\n const raw = this.ctx.activeArgs;\n const value = typeof raw === \"function\" ? 
raw() : raw;\n this._activeArgsCache = {\n v: value ? JSON.parse(value) : void 0\n };\n }\n return this._activeArgsCache.v;\n }\n // /** Can be used to determine features provided by the desktop instance. */\n // public get featureFlags() {\n // return this.ctx.featureFlags;\n // }\n getNamedAccessor(name) {\n return ifDef(this.ctx.getAccessorHandleByName(name), (accessor) => new TreeNodeAccessor(accessor, [name]));\n }\n get prerun() {\n return this.getNamedAccessor(StagingAccessorName);\n }\n get outputs() {\n return this.getNamedAccessor(MainAccessorName);\n }\n /**\n * Find labels data for a given axis id. It will search for a label column and return its data as a map.\n * @returns a map of axis value => label\n * @deprecated Use resultPool.findLabels instead\n */\n findLabels(axis) {\n return this.resultPool.findLabels(axis);\n }\n verifyInlineAndExplicitColumnsSupport(columns) {\n var _a;\n const hasInlineColumns = columns.some((c) => !(c.data instanceof TreeNodeAccessor) || isDataInfo(c.data));\n const inlineColumnsSupport = ((_a = this.ctx.featureFlags) == null ? void 0 : _a.inlineColumnsSupport) === true;\n if (hasInlineColumns && !inlineColumnsSupport)\n throw Error(`Inline or explicit columns not supported`);\n }\n patchPTableDef(def) {\n var _a, _b;\n if (!((_a = this.ctx.featureFlags) == null ? void 0 : _a.pTablePartitionFiltersSupport)) {\n def = {\n ...def,\n partitionFilters: [],\n filters: [...def.partitionFilters, ...def.filters]\n };\n }\n if (!((_b = this.ctx.featureFlags) == null ? void 0 : _b.pFrameInSetFilterSupport)) {\n def = {\n ...def,\n partitionFilters: patchInSetFilters(def.partitionFilters),\n filters: patchInSetFilters(def.filters)\n };\n }\n return def;\n }\n // TODO remove all non-PColumn fields\n createPFrame(def) {\n this.verifyInlineAndExplicitColumnsSupport(def);\n return this.ctx.createPFrame(def.map((c) => transformPColumnData(c)));\n }\n createPTable(def) {\n let rawDef;\n if (\"columns\" in def) {\n rawDef = this.patchPTableDef({\n src: {\n type: \"full\",\n entries: def.columns.map((c) => ({ type: \"column\", column: c }))\n },\n partitionFilters: def.filters ?? [],\n filters: [],\n sorting: def.sorting ?? 
[]\n });\n } else {\n rawDef = this.patchPTableDef(def);\n }\n this.verifyInlineAndExplicitColumnsSupport(extractAllColumns(rawDef.src));\n return this.ctx.createPTable(mapPTableDef(rawDef, (po) => transformPColumnData(po)));\n }\n /** @deprecated scheduled for removal from SDK */\n getBlockLabel(blockId) {\n return this.ctx.getBlockLabel(blockId);\n }\n getCurrentUnstableMarker() {\n return this.ctx.getCurrentUnstableMarker();\n }\n logInfo(msg) {\n this.ctx.logInfo(msg);\n }\n logWarn(msg) {\n this.ctx.logWarn(msg);\n }\n logError(msg) {\n this.ctx.logError(msg);\n }\n }\n var version = \"1.45.0\";\n const PlatformaSDKVersion = version;\n function isConfigLambda(cfgOrFh) {\n return cfgOrFh.__renderLambda === true;\n }\n function downgradeCfgOrLambda(data) {\n if (data === void 0)\n return void 0;\n if (isConfigLambda(data))\n return data.handle;\n return data;\n }\n const _BlockModel = class _BlockModel {\n constructor(_renderingMode, _initialArgs, _initialUiState, _outputs, _inputsValid, _sections, _title, _enrichmentTargets, _featureFlags) {\n __publicField(this, \"_renderingMode\");\n __publicField(this, \"_initialArgs\");\n __publicField(this, \"_initialUiState\");\n __publicField(this, \"_outputs\");\n __publicField(this, \"_inputsValid\");\n __publicField(this, \"_sections\");\n __publicField(this, \"_title\");\n __publicField(this, \"_enrichmentTargets\");\n __publicField(this, \"_featureFlags\");\n this._renderingMode = _renderingMode;\n this._initialArgs = _initialArgs;\n this._initialUiState = _initialUiState;\n this._outputs = _outputs;\n this._inputsValid = _inputsValid;\n this._sections = _sections;\n this._title = _title;\n this._enrichmentTargets = _enrichmentTargets;\n this._featureFlags = _featureFlags;\n }\n static create(renderingMode = \"Heavy\") {\n return new _BlockModel(renderingMode, void 0, {}, {}, getImmediate(true), getImmediate([]), void 0, void 0, { ..._BlockModel.INITIAL_BLOCK_FEATURE_FLAGS });\n }\n output(key, cfgOrRf, flags = {}) {\n if (typeof cfgOrRf === \"function\") {\n const handle = `output#${key}`;\n tryRegisterCallback(handle, () => cfgOrRf(new RenderCtx()));\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, {\n ...this._outputs,\n [key]: {\n __renderLambda: true,\n handle,\n ...flags\n }\n }, this._inputsValid, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n } else\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, {\n ...this._outputs,\n [key]: cfgOrRf\n }, this._inputsValid, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n }\n /** Shortcut for {@link output} with retentive flag set to true. 
*/\n retentiveOutput(key, rf) {\n return this.output(key, rf, { retentive: true });\n }\n argsValid(cfgOrRf) {\n if (typeof cfgOrRf === \"function\") {\n tryRegisterCallback(\"inputsValid\", () => cfgOrRf(new RenderCtx()));\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, {\n __renderLambda: true,\n handle: \"inputsValid\"\n }, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n } else\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, cfgOrRf, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n }\n sections(arrOrCfgOrRf) {\n if (Array.isArray(arrOrCfgOrRf)) {\n return this.sections(getImmediate(arrOrCfgOrRf));\n } else if (typeof arrOrCfgOrRf === \"function\") {\n tryRegisterCallback(\"sections\", () => arrOrCfgOrRf(new RenderCtx()));\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, this._inputsValid, { __renderLambda: true, handle: \"sections\" }, this._title, this._enrichmentTargets, this._featureFlags);\n } else\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, this._inputsValid, arrOrCfgOrRf, this._title, this._enrichmentTargets, this._featureFlags);\n }\n /** Sets a rendering function to derive block title, shown for the block in the left blocks-overview panel. */\n title(rf) {\n tryRegisterCallback(\"title\", () => rf(new RenderCtx()));\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, this._inputsValid, this._sections, { __renderLambda: true, handle: \"title\" }, this._enrichmentTargets, this._featureFlags);\n }\n /**\n * Sets initial args for the block, this value must be specified.\n * @deprecated use {@link withArgs}\n * */\n initialArgs(value) {\n return new _BlockModel(this._renderingMode, value, this._initialUiState, this._outputs, this._inputsValid, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n }\n /** Sets initial args for the block, this value must be specified. */\n withArgs(initialValue) {\n return new _BlockModel(this._renderingMode, initialValue, this._initialUiState, this._outputs, this._inputsValid, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n }\n /** Defines type and sets initial value for block UiState. */\n withUiState(initialValue) {\n return new _BlockModel(this._renderingMode, this._initialArgs, initialValue, this._outputs, this._inputsValid, this._sections, this._title, this._enrichmentTargets, this._featureFlags);\n }\n /** Sets or overrides feature flags for the block. 
*/\n withFeatureFlags(flags) {\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, this._inputsValid, this._sections, this._title, this._enrichmentTargets, { ...this._featureFlags, ...flags });\n }\n /**\n * Defines how to derive list of upstream references this block is meant to enrich with its exports from block args.\n * Influences dependency graph construction.\n */\n enriches(lambda) {\n tryRegisterCallback(\"enrichmentTargets\", lambda);\n return new _BlockModel(this._renderingMode, this._initialArgs, this._initialUiState, this._outputs, this._inputsValid, this._sections, this._title, { __renderLambda: true, handle: \"enrichmentTargets\" }, this._featureFlags);\n }\n /** Renders all provided block settings into a pre-configured platforma API\n * instance, that can be used in frontend to interact with block state, and\n * other features provided by the platforma to the block. */\n done(apiVersion) {\n const requiresUIAPIVersion = apiVersion ?? 1;\n return this.withFeatureFlags({\n ...this._featureFlags,\n requiresUIAPIVersion\n })._done(requiresUIAPIVersion);\n }\n _done(apiVersion) {\n if (this._initialArgs === void 0)\n throw new Error(\"Initial arguments not set.\");\n const config = {\n v3: {\n sdkVersion: PlatformaSDKVersion,\n renderingMode: this._renderingMode,\n initialArgs: this._initialArgs,\n initialUiState: this._initialUiState,\n inputsValid: this._inputsValid,\n sections: this._sections,\n title: this._title,\n outputs: this._outputs,\n enrichmentTargets: this._enrichmentTargets,\n featureFlags: this._featureFlags\n },\n // fields below are added to allow previous desktop versions read generated configs\n sdkVersion: PlatformaSDKVersion,\n renderingMode: this._renderingMode,\n initialArgs: this._initialArgs,\n inputsValid: downgradeCfgOrLambda(this._inputsValid),\n sections: downgradeCfgOrLambda(this._sections),\n outputs: Object.fromEntries(Object.entries(this._outputs).map(([key, value]) => [key, downgradeCfgOrLambda(value)]))\n };\n globalThis.platformaApiVersion = apiVersion;\n if (!isInUI())\n return { config };\n else\n return getPlatformaInstance({ sdkVersion: PlatformaSDKVersion, apiVersion: platformaApiVersion });\n }\n };\n __publicField(_BlockModel, \"INITIAL_BLOCK_FEATURE_FLAGS\", {\n supportsLazyState: true,\n requiresUIAPIVersion: 1,\n requiresModelAPIVersion: 1\n });\n let BlockModel = _BlockModel;\n function isLabelColumn(column) {\n return column.axesSpec.length === 1 && column.name === PColumnName.Label;\n }\n const colId = (id, domains) => {\n let wid = id.toString();\n domains == null ? 
void 0 : domains.forEach((domain) => {\n if (domain) {\n for (const [k, v] of Object.entries(domain)) {\n wid += k;\n wid += v;\n }\n }\n });\n return wid;\n };\n function getKeysCombinations(idsLists) {\n if (!idsLists.length) {\n return [];\n }\n let result = [[]];\n idsLists.forEach((list) => {\n const nextResult = [];\n list.forEach((key) => {\n nextResult.push(...result.map((resultItem) => [...resultItem, key]));\n });\n result = nextResult;\n });\n return result;\n }\n function isLinkerColumn(column) {\n return !!readAnnotationJson(column, Annotation.IsLinkerColumn);\n }\n function isHiddenFromGraphColumn(column) {\n return !!readAnnotationJson(column, Annotation.HideDataFromGraphs);\n }\n function getAvailableWithLinkersAxes(linkerColumns, blockAxes) {\n const linkerMap = LinkerMap.fromColumns(linkerColumns.map(getColumnIdAndSpec));\n const startKeys = [];\n const blockAxesGrouped = [...blockAxes.values()].map((axis) => getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n for (const axesGroupBlock of blockAxesGrouped) {\n const matched = linkerMap.keyAxesIds.find((keyIds) => keyIds.every((keySourceAxis) => axesGroupBlock.find((axisSpecFromBlock) => matchAxisId(axisSpecFromBlock, keySourceAxis))));\n if (matched) {\n startKeys.push(canonicalizeJson(matched));\n }\n }\n const availableKeys = linkerMap.searchAvailableAxesKeys(startKeys);\n const availableAxes = linkerMap.getAxesListFromKeysList([...availableKeys]);\n return new Map(availableAxes.map((axisSpec) => {\n const id = getAxisId(axisSpec);\n return [canonicalizeJson(id), axisSpec];\n }));\n }\n function enrichCompatible(blockAxes, columns) {\n const result = [];\n columns.forEach((column) => {\n result.push(...getAdditionalColumnsForColumn(blockAxes, column));\n });\n return result;\n }\n function getAdditionalColumnsForColumn(blockAxes, column) {\n const columnAxesIds = column.spec.axesSpec.map(getAxisId);\n if (columnAxesIds.every((id) => blockAxes.has(canonicalizeJson(id)))) {\n return [column];\n }\n const secondaryIdsOptions = columnAxesIds.map((id) => {\n const result = [];\n for (const [_, mainId] of blockAxes) {\n if (matchAxisId(mainId, id) && !matchAxisId(id, mainId)) {\n result.push(mainId);\n }\n }\n return result;\n });\n const secondaryIdsVariants = getKeysCombinations(secondaryIdsOptions);\n const allAddedDomainValues = /* @__PURE__ */ new Set();\n const addedNotToAllVariantsDomainValues = /* @__PURE__ */ new Set();\n const addedByVariantsDomainValues = secondaryIdsVariants.map((idsList) => {\n const addedSet = /* @__PURE__ */ new Set();\n idsList.map((axisId, idx) => {\n const d1 = column.spec.axesSpec[idx].domain;\n const d2 = axisId.domain;\n Object.entries(d2 ?? {}).forEach(([key, value]) => {\n if ((d1 == null ? void 0 : d1[key]) === void 0) {\n const item = JSON.stringify([key, value]);\n addedSet.add(item);\n allAddedDomainValues.add(item);\n }\n });\n return {\n ...axisId,\n annotations: column.spec.axesSpec[idx].annotations\n };\n });\n return addedSet;\n });\n [...allAddedDomainValues].forEach((addedPart) => {\n if (addedByVariantsDomainValues.some((s) => !s.has(addedPart))) {\n addedNotToAllVariantsDomainValues.add(addedPart);\n }\n });\n const additionalColumns = secondaryIdsVariants.map((idsList, idx) => {\n const id = colId(column.id, idsList.map((id2) => id2.domain));\n const label = readAnnotation(column.spec, Annotation.Label) ?? 
\"\";\n const labelDomainPart = [...addedByVariantsDomainValues[idx]].filter((str) => addedNotToAllVariantsDomainValues.has(str)).sort().map((v) => {\n var _a;\n return (_a = JSON.parse(v)) == null ? void 0 : _a[1];\n }).join(\" / \");\n const annotations = {\n ...column.spec.annotations,\n [Annotation.Graph.IsVirtual]: stringifyJson(true)\n };\n if (label || labelDomainPart) {\n annotations[Annotation.Label] = label && labelDomainPart ? label + \" / \" + labelDomainPart : label + labelDomainPart;\n }\n return {\n id,\n spec: {\n ...column.spec,\n axesSpec: idsList.map((axisId, idx2) => ({\n ...axisId,\n annotations: column.spec.axesSpec[idx2].annotations\n })),\n annotations\n },\n data: column.data\n };\n });\n return [column, ...additionalColumns];\n }\n function isColumnReady(c) {\n let ready = true;\n if (c.data instanceof TreeNodeAccessor) {\n ready = ready && c.data.getIsReadyOrError();\n } else if (isDataInfo(c.data)) {\n visitDataInfo(c.data, (v) => {\n ready = ready && v.getIsReadyOrError();\n });\n }\n return ready;\n }\n function allColumnsReady(columns) {\n return columns.every(isColumnReady);\n }\n function createPFrameForGraphs(ctx, blockColumns) {\n if (!blockColumns) {\n const columns2 = new PColumnCollection();\n columns2.addColumnProvider(ctx.resultPool);\n const allColumns = columns2.getColumns((spec) => !isHiddenFromGraphColumn(spec), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? [];\n if (!allColumnsReady(allColumns)) {\n return void 0;\n }\n const allAxes2 = new Map(allColumns.flatMap((column) => getNormalizedAxesList(column.spec.axesSpec)).map((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return [canonicalizeJson(axisId), axisSpec];\n }));\n const extendedColumns2 = enrichCompatible(allAxes2, allColumns);\n return ctx.createPFrame(extendedColumns2);\n }\n if (!allColumnsReady(blockColumns)) {\n return void 0;\n }\n const columns = new PColumnCollection();\n columns.addColumnProvider(ctx.resultPool);\n columns.addColumns(blockColumns);\n const blockAxes = /* @__PURE__ */ new Map();\n const allAxes = /* @__PURE__ */ new Map();\n for (const c of blockColumns) {\n for (const spec of getNormalizedAxesList(c.spec.axesSpec)) {\n const aid = getAxisId(spec);\n blockAxes.set(canonicalizeJson(aid), spec);\n allAxes.set(canonicalizeJson(aid), spec);\n }\n }\n const linkerColumns = columns.getColumns((spec) => isLinkerColumn(spec)) ?? [];\n const availableWithLinkersAxes = getAvailableWithLinkersAxes(linkerColumns, blockAxes);\n for (const item of availableWithLinkersAxes) {\n blockAxes.set(...item);\n allAxes.set(...item);\n }\n let compatibleWithoutLabels = (columns.getColumns((spec) => !isHiddenFromGraphColumn(spec) && spec.axesSpec.some((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return Array.from(blockAxes.values()).some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? 
[]).filter((column) => !isLabelColumn(column.spec));\n if (!allColumnsReady(compatibleWithoutLabels)) {\n return void 0;\n }\n for (const c of compatibleWithoutLabels) {\n for (const spec of getNormalizedAxesList(c.spec.axesSpec)) {\n const aid = getAxisId(spec);\n allAxes.set(canonicalizeJson(aid), spec);\n }\n }\n compatibleWithoutLabels = (columns.getColumns((spec) => !isHiddenFromGraphColumn(spec) && spec.axesSpec.every((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return Array.from(allAxes.values()).some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? []).filter((column) => !isLabelColumn(column.spec));\n const compatibleLabels = (columns.getColumns((spec) => !isHiddenFromGraphColumn(spec) && spec.axesSpec.some((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return Array.from(allAxes.values()).some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? []).filter((column) => isLabelColumn(column.spec));\n if (!allColumnsReady(compatibleLabels)) {\n return void 0;\n }\n const compatible = [...compatibleWithoutLabels, ...compatibleLabels];\n const extendedColumns = enrichCompatible(blockAxes, compatible);\n return ctx.createPFrame(extendedColumns);\n }\n var stringify = { exports: {} };\n var hasRequiredStringify;\n function requireStringify() {\n if (hasRequiredStringify) return stringify.exports;\n hasRequiredStringify = 1;\n (function(module2, exports3) {\n exports3 = module2.exports = stringify2;\n exports3.getSerialize = serializer;\n function stringify2(obj, replacer, spaces, cycleReplacer) {\n return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces);\n }\n function serializer(replacer, cycleReplacer) {\n var stack = [], keys = [];\n if (cycleReplacer == null) cycleReplacer = function(key, value) {\n if (stack[0] === value) return \"[Circular ~]\";\n return \"[Circular ~.\" + keys.slice(0, stack.indexOf(value)).join(\".\") + \"]\";\n };\n return function(key, value) {\n if (stack.length > 0) {\n var thisPos = stack.indexOf(this);\n ~thisPos ? stack.splice(thisPos + 1) : stack.push(this);\n ~thisPos ? keys.splice(thisPos, Infinity, key) : keys.push(key);\n if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value);\n } else stack.push(value);\n return replacer == null ? value : replacer.call(this, key, value);\n };\n }\n })(stringify, stringify.exports);\n return stringify.exports;\n }\n requireStringify();\n const BasePlErrorLike = z.object({\n type: z.literal(\"PlError\"),\n name: z.string(),\n message: z.string(),\n /** The message with all details needed for SDK developers. 
*/\n fullMessage: z.string().optional(),\n stack: z.string().optional()\n });\n const PlErrorLike = BasePlErrorLike.extend({\n cause: z.lazy(() => ErrorLike).optional(),\n errors: z.lazy(() => ErrorLike.array()).optional()\n });\n const BaseStandardErrorLike = z.object({\n type: z.literal(\"StandardError\"),\n name: z.string(),\n message: z.string(),\n stack: z.string().optional()\n });\n const StandardErrorLike = BaseStandardErrorLike.extend({\n cause: z.lazy(() => ErrorLike).optional(),\n errors: z.lazy(() => ErrorLike.array()).optional()\n });\n const ErrorLike = z.union([StandardErrorLike, PlErrorLike]);\n const baseErrorShape = z.object({\n name: z.string(),\n message: z.string(),\n fullMessage: z.string().optional(),\n stack: z.string().optional()\n });\n const ErrorShape = baseErrorShape.extend({\n cause: z.lazy(() => ErrorShape).optional(),\n errors: z.lazy(() => ErrorShape.array()).optional()\n });\n const model = BlockModel.create().withArgs({\n scChain: \"A\",\n allele: false\n }).withUiState({\n blockTitle: \"V/J Usage\",\n weightedFlag: true,\n vUsagePlotState: {\n title: \"V Usage\",\n template: \"heatmapClustered\",\n currentTab: \"settings\",\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null\n }\n }\n },\n jUsagePlotState: {\n title: \"V Usage\",\n template: \"heatmapClustered\",\n currentTab: null,\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null\n }\n }\n },\n vjUsagePlotState: {\n title: \"V/J Usage\",\n template: \"heatmapClustered\",\n currentTab: null,\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null\n }\n }\n }\n }).argsValid((ctx) => ctx.args.datasetRef !== void 0).output(\n \"datasetOptions\",\n (ctx) => ctx.resultPool.getOptions(\n [{\n axes: [\n { name: \"pl7.app/sampleId\" },\n { name: \"pl7.app/vdj/clonotypeKey\" }\n ],\n annotations: { \"pl7.app/isAnchor\": \"true\" }\n }, {\n axes: [\n { name: \"pl7.app/sampleId\" },\n { name: \"pl7.app/vdj/scClonotypeKey\" }\n ],\n annotations: { \"pl7.app/isAnchor\": \"true\" }\n }],\n {\n // suppress native label of the column (e.g. \"Number of Reads\") to show only the dataset label\n label: { includeNativeLabel: false }\n }\n )\n ).output(\"datasetSpec\", (ctx) => {\n if (ctx.args.datasetRef === void 0) {\n return void 0;\n }\n return ctx.resultPool.getPColumnSpecByRef(ctx.args.datasetRef);\n }).output(\"pf\", (ctx) => {\n var _a, _b;\n const pCols = (_b = (_a = ctx.outputs) == null ? void 0 : _a.resolve(\"pf\")) == null ? void 0 : _b.getPColumns();\n if (pCols === void 0) {\n return void 0;\n }\n return createPFrameForGraphs(ctx, pCols);\n }).output(\"isRunning\", (ctx) => {\n var _a;\n return ((_a = ctx.outputs) == null ? void 0 : _a.getIsReadyOrError()) === false;\n }).title((ctx) => {\n var _a;\n return ((_a = ctx.uiState) == null ? void 0 : _a.blockTitle) ?? \"V/J Usage\";\n }).sections((_) => [\n { type: \"link\", href: \"/\", label: \"V Gene Usage\" },\n { type: \"link\", href: \"/jUsage\", label: \"J Gene Usage\" },\n { type: \"link\", href: \"/vjUsage\", label: \"V/J Gene Usage\" }\n ]).done(2);\n exports2.model = model;\n Object.defineProperty(exports2, Symbol.toStringTag, { value: \"Module\" });\n});\n//# sourceMappingURL=bundle.js.map\n"}}
|
|
1
|
+
{"v3":{"configVersion":3,"modelAPIVersion":1,"sdkVersion":"1.53.3","renderingMode":"Heavy","initialArgs":{"defaultBlockLabel":"Gene","customBlockLabel":"","scChain":"A","allele":false},"initialUiState":{"weightedFlag":true,"vUsagePlotState":{"title":"V Usage","template":"heatmapClustered","currentTab":"settings","layersSettings":{"heatmapClustered":{"normalizationDirection":null}}},"jUsagePlotState":{"title":"J Usage","template":"heatmapClustered","currentTab":null,"layersSettings":{"heatmapClustered":{"normalizationDirection":null}}},"vjUsagePlotState":{"title":"V/J Usage","template":"heatmapClustered","currentTab":null,"layersSettings":{"heatmapClustered":{"normalizationDirection":null}}}},"inputsValid":{"__renderLambda":true,"handle":"inputsValid"},"sections":{"__renderLambda":true,"handle":"sections"},"title":{"__renderLambda":true,"handle":"title"},"subtitle":{"__renderLambda":true,"handle":"subtitle"},"outputs":{"datasetOptions":{"__renderLambda":true,"handle":"output#datasetOptions"},"datasetSpec":{"__renderLambda":true,"handle":"output#datasetSpec"},"pf":{"__renderLambda":true,"handle":"output#pf","withStatus":true},"isRunning":{"__renderLambda":true,"handle":"output#isRunning"}},"featureFlags":{"supportsLazyState":true,"requiresUIAPIVersion":2,"requiresModelAPIVersion":1}},"sdkVersion":"1.53.3","renderingMode":"Heavy","initialArgs":{"defaultBlockLabel":"Gene","customBlockLabel":"","scChain":"A","allele":false},"inputsValid":"inputsValid","sections":"sections","outputs":{"datasetOptions":"output#datasetOptions","datasetSpec":"output#datasetSpec","pf":"output#pf","isRunning":"output#isRunning"},"code":{"type":"plain","content":"(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, factory(global[\"block-model\"] = {}));\n})(this, (function (exports) { 'use strict';\n\n /**\n * BlockStorage - Typed storage abstraction for block persistent data.\n *\n * This module provides:\n * - A typed structure for block storage with versioning and plugin support\n * - Utility functions for manipulating storage\n * - Handler interfaces for model-level customization\n *\n * @module block_storage\n */\n // =============================================================================\n // Core Types\n // =============================================================================\n /**\n * Discriminator key for BlockStorage format detection.\n * This unique hash-based key identifies data as BlockStorage vs legacy formats.\n */\n const BLOCK_STORAGE_KEY = '__pl_a7f3e2b9__';\n /**\n * Current BlockStorage schema version.\n * Increment this when the storage structure itself changes (not block state migrations).\n */\n const BLOCK_STORAGE_SCHEMA_VERSION = 'v1';\n /**\n * Default data version for new blocks without migrations.\n * Unique identifier ensures blocks are created via DataModel API.\n */\n const DATA_MODEL_DEFAULT_VERSION = '__pl_v1_d4e8f2a1__';\n /**\n * Type guard to check if a value is a valid BlockStorage object.\n * Checks for the discriminator key and valid schema version.\n */\n function isBlockStorage(value) {\n if (value === null || typeof value !== 'object')\n return false;\n const obj = value;\n const schemaVersion = obj[BLOCK_STORAGE_KEY];\n // Currently only 'v1' is valid, but this allows future versions\n return schemaVersion === 'v1'; // Add more versions as schema evolves\n }\n // =============================================================================\n // Factory Functions\n // =============================================================================\n /**\n * Creates a BlockStorage with the given initial data\n *\n * @param initialData - The initial data value (defaults to empty object)\n * @param version - The initial data version key (defaults to DATA_MODEL_DEFAULT_VERSION)\n * @returns A new BlockStorage instance with discriminator key\n */\n function createBlockStorage(initialData = {}, version = DATA_MODEL_DEFAULT_VERSION) {\n return {\n [BLOCK_STORAGE_KEY]: BLOCK_STORAGE_SCHEMA_VERSION,\n __dataVersion: version,\n __data: initialData,\n };\n }\n /**\n * Normalizes raw storage data to BlockStorage format.\n * If the input is already a BlockStorage, returns it as-is.\n * If the input is legacy format (raw state), wraps it in BlockStorage structure.\n *\n * @param raw - Raw storage data (may be legacy format or BlockStorage)\n * @returns Normalized BlockStorage\n */\n function normalizeBlockStorage(raw) {\n if (isBlockStorage(raw)) {\n const storage = raw;\n return {\n ...storage,\n // Fix for early released version where __dataVersion was a number\n __dataVersion: typeof storage.__dataVersion === 'number'\n ? 
DATA_MODEL_DEFAULT_VERSION\n : storage.__dataVersion,\n };\n }\n // Legacy format: raw is the state directly\n return createBlockStorage(raw);\n }\n // =============================================================================\n // Data Access & Update Functions\n // =============================================================================\n /**\n * Gets the data from BlockStorage\n *\n * @param storage - The BlockStorage instance\n * @returns The data value\n */\n function getStorageData(storage) {\n return storage.__data;\n }\n /**\n * Updates the data in BlockStorage (immutable)\n *\n * @param storage - The current BlockStorage\n * @param payload - The update payload with operation and value\n * @returns A new BlockStorage with updated data\n */\n function updateStorageData(storage, payload) {\n switch (payload.operation) {\n case 'update-data':\n return { ...storage, __data: payload.value };\n default:\n throw new Error(`Unknown storage operation: ${payload.operation}`);\n }\n }\n\n //\n // Helpers\n //\n //\n // Json\n //\n function getImmediate(value) {\n return { type: 'Immediate', value };\n }\n\n /** Utility code helping to identify whether the code is running in actual UI environment */\n function isInUI() {\n return (typeof globalThis.getPlatforma !== 'undefined' || typeof globalThis.platforma !== 'undefined');\n }\n /** Utility code helping to retrieve a platforma instance form the environment */\n function getPlatformaInstance(config) {\n if (config && typeof globalThis.getPlatforma === 'function')\n return globalThis.getPlatforma(config);\n else if (typeof globalThis.platforma !== 'undefined')\n return globalThis.platforma;\n else\n throw new Error('Can\\'t get platforma instance.');\n }\n function tryGetCfgRenderCtx() {\n if (typeof globalThis.cfgRenderCtx !== 'undefined')\n return globalThis.cfgRenderCtx;\n else\n return undefined;\n }\n function getCfgRenderCtx() {\n if (typeof globalThis.cfgRenderCtx !== 'undefined')\n return globalThis.cfgRenderCtx;\n else\n throw new Error('Not in config rendering context');\n }\n function tryRegisterCallback(key, callback) {\n const ctx = tryGetCfgRenderCtx();\n if (ctx === undefined)\n return false;\n if (key in ctx.callbackRegistry)\n throw new Error(`Callback with key ${key} already registered.`);\n ctx.callbackRegistry[key] = callback;\n return true;\n }\n const futureResolves = new Map();\n function registerFutureAwait(handle, onResolve) {\n if (!(handle in getCfgRenderCtx().callbackRegistry)) {\n getCfgRenderCtx().callbackRegistry[handle] = (value) => {\n for (const res of futureResolves.get(handle)) {\n res(value);\n }\n };\n futureResolves.set(handle, []);\n }\n futureResolves.get(handle).push(onResolve);\n }\n\n var util;\n (function (util) {\n util.assertEqual = (val) => val;\n function assertIs(_arg) { }\n util.assertIs = assertIs;\n function assertNever(_x) {\n throw new Error();\n }\n util.assertNever = assertNever;\n util.arrayToEnum = (items) => {\n const obj = {};\n for (const item of items) {\n obj[item] = item;\n }\n return obj;\n };\n util.getValidEnumValues = (obj) => {\n const validKeys = util.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== \"number\");\n const filtered = {};\n for (const k of validKeys) {\n filtered[k] = obj[k];\n }\n return util.objectValues(filtered);\n };\n util.objectValues = (obj) => {\n return util.objectKeys(obj).map(function (e) {\n return obj[e];\n });\n };\n util.objectKeys = typeof Object.keys === \"function\" // eslint-disable-line ban/ban\n ? 
(obj) => Object.keys(obj) // eslint-disable-line ban/ban\n : (object) => {\n const keys = [];\n for (const key in object) {\n if (Object.prototype.hasOwnProperty.call(object, key)) {\n keys.push(key);\n }\n }\n return keys;\n };\n util.find = (arr, checker) => {\n for (const item of arr) {\n if (checker(item))\n return item;\n }\n return undefined;\n };\n util.isInteger = typeof Number.isInteger === \"function\"\n ? (val) => Number.isInteger(val) // eslint-disable-line ban/ban\n : (val) => typeof val === \"number\" && isFinite(val) && Math.floor(val) === val;\n function joinValues(array, separator = \" | \") {\n return array\n .map((val) => (typeof val === \"string\" ? `'${val}'` : val))\n .join(separator);\n }\n util.joinValues = joinValues;\n util.jsonStringifyReplacer = (_, value) => {\n if (typeof value === \"bigint\") {\n return value.toString();\n }\n return value;\n };\n })(util || (util = {}));\n var objectUtil;\n (function (objectUtil) {\n objectUtil.mergeShapes = (first, second) => {\n return {\n ...first,\n ...second, // second overwrites first\n };\n };\n })(objectUtil || (objectUtil = {}));\n const ZodParsedType = util.arrayToEnum([\n \"string\",\n \"nan\",\n \"number\",\n \"integer\",\n \"float\",\n \"boolean\",\n \"date\",\n \"bigint\",\n \"symbol\",\n \"function\",\n \"undefined\",\n \"null\",\n \"array\",\n \"object\",\n \"unknown\",\n \"promise\",\n \"void\",\n \"never\",\n \"map\",\n \"set\",\n ]);\n const getParsedType = (data) => {\n const t = typeof data;\n switch (t) {\n case \"undefined\":\n return ZodParsedType.undefined;\n case \"string\":\n return ZodParsedType.string;\n case \"number\":\n return isNaN(data) ? ZodParsedType.nan : ZodParsedType.number;\n case \"boolean\":\n return ZodParsedType.boolean;\n case \"function\":\n return ZodParsedType.function;\n case \"bigint\":\n return ZodParsedType.bigint;\n case \"symbol\":\n return ZodParsedType.symbol;\n case \"object\":\n if (Array.isArray(data)) {\n return ZodParsedType.array;\n }\n if (data === null) {\n return ZodParsedType.null;\n }\n if (data.then &&\n typeof data.then === \"function\" &&\n data.catch &&\n typeof data.catch === \"function\") {\n return ZodParsedType.promise;\n }\n if (typeof Map !== \"undefined\" && data instanceof Map) {\n return ZodParsedType.map;\n }\n if (typeof Set !== \"undefined\" && data instanceof Set) {\n return ZodParsedType.set;\n }\n if (typeof Date !== \"undefined\" && data instanceof Date) {\n return ZodParsedType.date;\n }\n return ZodParsedType.object;\n default:\n return ZodParsedType.unknown;\n }\n };\n\n const ZodIssueCode = util.arrayToEnum([\n \"invalid_type\",\n \"invalid_literal\",\n \"custom\",\n \"invalid_union\",\n \"invalid_union_discriminator\",\n \"invalid_enum_value\",\n \"unrecognized_keys\",\n \"invalid_arguments\",\n \"invalid_return_type\",\n \"invalid_date\",\n \"invalid_string\",\n \"too_small\",\n \"too_big\",\n \"invalid_intersection_types\",\n \"not_multiple_of\",\n \"not_finite\",\n ]);\n const quotelessJson = (obj) => {\n const json = JSON.stringify(obj, null, 2);\n return json.replace(/\"([^\"]+)\":/g, \"$1:\");\n };\n class ZodError extends Error {\n constructor(issues) {\n super();\n this.issues = [];\n this.addIssue = (sub) => {\n this.issues = [...this.issues, sub];\n };\n this.addIssues = (subs = []) => {\n this.issues = [...this.issues, ...subs];\n };\n const actualProto = new.target.prototype;\n if (Object.setPrototypeOf) {\n // eslint-disable-next-line ban/ban\n Object.setPrototypeOf(this, actualProto);\n }\n else {\n this.__proto__ = 
actualProto;\n }\n this.name = \"ZodError\";\n this.issues = issues;\n }\n get errors() {\n return this.issues;\n }\n format(_mapper) {\n const mapper = _mapper ||\n function (issue) {\n return issue.message;\n };\n const fieldErrors = { _errors: [] };\n const processError = (error) => {\n for (const issue of error.issues) {\n if (issue.code === \"invalid_union\") {\n issue.unionErrors.map(processError);\n }\n else if (issue.code === \"invalid_return_type\") {\n processError(issue.returnTypeError);\n }\n else if (issue.code === \"invalid_arguments\") {\n processError(issue.argumentsError);\n }\n else if (issue.path.length === 0) {\n fieldErrors._errors.push(mapper(issue));\n }\n else {\n let curr = fieldErrors;\n let i = 0;\n while (i < issue.path.length) {\n const el = issue.path[i];\n const terminal = i === issue.path.length - 1;\n if (!terminal) {\n curr[el] = curr[el] || { _errors: [] };\n // if (typeof el === \"string\") {\n // curr[el] = curr[el] || { _errors: [] };\n // } else if (typeof el === \"number\") {\n // const errorArray: any = [];\n // errorArray._errors = [];\n // curr[el] = curr[el] || errorArray;\n // }\n }\n else {\n curr[el] = curr[el] || { _errors: [] };\n curr[el]._errors.push(mapper(issue));\n }\n curr = curr[el];\n i++;\n }\n }\n }\n };\n processError(this);\n return fieldErrors;\n }\n static assert(value) {\n if (!(value instanceof ZodError)) {\n throw new Error(`Not a ZodError: ${value}`);\n }\n }\n toString() {\n return this.message;\n }\n get message() {\n return JSON.stringify(this.issues, util.jsonStringifyReplacer, 2);\n }\n get isEmpty() {\n return this.issues.length === 0;\n }\n flatten(mapper = (issue) => issue.message) {\n const fieldErrors = {};\n const formErrors = [];\n for (const sub of this.issues) {\n if (sub.path.length > 0) {\n fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || [];\n fieldErrors[sub.path[0]].push(mapper(sub));\n }\n else {\n formErrors.push(mapper(sub));\n }\n }\n return { formErrors, fieldErrors };\n }\n get formErrors() {\n return this.flatten();\n }\n }\n ZodError.create = (issues) => {\n const error = new ZodError(issues);\n return error;\n };\n\n const errorMap = (issue, _ctx) => {\n let message;\n switch (issue.code) {\n case ZodIssueCode.invalid_type:\n if (issue.received === ZodParsedType.undefined) {\n message = \"Required\";\n }\n else {\n message = `Expected ${issue.expected}, received ${issue.received}`;\n }\n break;\n case ZodIssueCode.invalid_literal:\n message = `Invalid literal value, expected ${JSON.stringify(issue.expected, util.jsonStringifyReplacer)}`;\n break;\n case ZodIssueCode.unrecognized_keys:\n message = `Unrecognized key(s) in object: ${util.joinValues(issue.keys, \", \")}`;\n break;\n case ZodIssueCode.invalid_union:\n message = `Invalid input`;\n break;\n case ZodIssueCode.invalid_union_discriminator:\n message = `Invalid discriminator value. Expected ${util.joinValues(issue.options)}`;\n break;\n case ZodIssueCode.invalid_enum_value:\n message = `Invalid enum value. 
Expected ${util.joinValues(issue.options)}, received '${issue.received}'`;\n break;\n case ZodIssueCode.invalid_arguments:\n message = `Invalid function arguments`;\n break;\n case ZodIssueCode.invalid_return_type:\n message = `Invalid function return type`;\n break;\n case ZodIssueCode.invalid_date:\n message = `Invalid date`;\n break;\n case ZodIssueCode.invalid_string:\n if (typeof issue.validation === \"object\") {\n if (\"includes\" in issue.validation) {\n message = `Invalid input: must include \"${issue.validation.includes}\"`;\n if (typeof issue.validation.position === \"number\") {\n message = `${message} at one or more positions greater than or equal to ${issue.validation.position}`;\n }\n }\n else if (\"startsWith\" in issue.validation) {\n message = `Invalid input: must start with \"${issue.validation.startsWith}\"`;\n }\n else if (\"endsWith\" in issue.validation) {\n message = `Invalid input: must end with \"${issue.validation.endsWith}\"`;\n }\n else {\n util.assertNever(issue.validation);\n }\n }\n else if (issue.validation !== \"regex\") {\n message = `Invalid ${issue.validation}`;\n }\n else {\n message = \"Invalid\";\n }\n break;\n case ZodIssueCode.too_small:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.exact ? \"exactly\" : issue.inclusive ? `at least` : `more than`} ${issue.minimum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.exact ? \"exactly\" : issue.inclusive ? `at least` : `over`} ${issue.minimum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be ${issue.exact\n ? `exactly equal to `\n : issue.inclusive\n ? `greater than or equal to `\n : `greater than `}${issue.minimum}`;\n else if (issue.type === \"date\")\n message = `Date must be ${issue.exact\n ? `exactly equal to `\n : issue.inclusive\n ? `greater than or equal to `\n : `greater than `}${new Date(Number(issue.minimum))}`;\n else\n message = \"Invalid input\";\n break;\n case ZodIssueCode.too_big:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be ${issue.exact\n ? `exactly`\n : issue.inclusive\n ? `less than or equal to`\n : `less than`} ${issue.maximum}`;\n else if (issue.type === \"bigint\")\n message = `BigInt must be ${issue.exact\n ? `exactly`\n : issue.inclusive\n ? `less than or equal to`\n : `less than`} ${issue.maximum}`;\n else if (issue.type === \"date\")\n message = `Date must be ${issue.exact\n ? `exactly`\n : issue.inclusive\n ? 
`smaller than or equal to`\n : `smaller than`} ${new Date(Number(issue.maximum))}`;\n else\n message = \"Invalid input\";\n break;\n case ZodIssueCode.custom:\n message = `Invalid input`;\n break;\n case ZodIssueCode.invalid_intersection_types:\n message = `Intersection results could not be merged`;\n break;\n case ZodIssueCode.not_multiple_of:\n message = `Number must be a multiple of ${issue.multipleOf}`;\n break;\n case ZodIssueCode.not_finite:\n message = \"Number must be finite\";\n break;\n default:\n message = _ctx.defaultError;\n util.assertNever(issue);\n }\n return { message };\n };\n\n let overrideErrorMap = errorMap;\n function setErrorMap(map) {\n overrideErrorMap = map;\n }\n function getErrorMap() {\n return overrideErrorMap;\n }\n\n const makeIssue = (params) => {\n const { data, path, errorMaps, issueData } = params;\n const fullPath = [...path, ...(issueData.path || [])];\n const fullIssue = {\n ...issueData,\n path: fullPath,\n };\n if (issueData.message !== undefined) {\n return {\n ...issueData,\n path: fullPath,\n message: issueData.message,\n };\n }\n let errorMessage = \"\";\n const maps = errorMaps\n .filter((m) => !!m)\n .slice()\n .reverse();\n for (const map of maps) {\n errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message;\n }\n return {\n ...issueData,\n path: fullPath,\n message: errorMessage,\n };\n };\n const EMPTY_PATH = [];\n function addIssueToContext(ctx, issueData) {\n const overrideMap = getErrorMap();\n const issue = makeIssue({\n issueData: issueData,\n data: ctx.data,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n overrideMap,\n overrideMap === errorMap ? undefined : errorMap, // then global default map\n ].filter((x) => !!x),\n });\n ctx.common.issues.push(issue);\n }\n class ParseStatus {\n constructor() {\n this.value = \"valid\";\n }\n dirty() {\n if (this.value === \"valid\")\n this.value = \"dirty\";\n }\n abort() {\n if (this.value !== \"aborted\")\n this.value = \"aborted\";\n }\n static mergeArray(status, results) {\n const arrayValue = [];\n for (const s of results) {\n if (s.status === \"aborted\")\n return INVALID;\n if (s.status === \"dirty\")\n status.dirty();\n arrayValue.push(s.value);\n }\n return { status: status.value, value: arrayValue };\n }\n static async mergeObjectAsync(status, pairs) {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n syncPairs.push({\n key,\n value,\n });\n }\n return ParseStatus.mergeObjectSync(status, syncPairs);\n }\n static mergeObjectSync(status, pairs) {\n const finalObject = {};\n for (const pair of pairs) {\n const { key, value } = pair;\n if (key.status === \"aborted\")\n return INVALID;\n if (value.status === \"aborted\")\n return INVALID;\n if (key.status === \"dirty\")\n status.dirty();\n if (value.status === \"dirty\")\n status.dirty();\n if (key.value !== \"__proto__\" &&\n (typeof value.value !== \"undefined\" || pair.alwaysSet)) {\n finalObject[key.value] = value.value;\n }\n }\n return { status: status.value, value: finalObject };\n }\n }\n const INVALID = Object.freeze({\n status: \"aborted\",\n });\n const DIRTY = (value) => ({ status: \"dirty\", value });\n const OK = (value) => ({ status: \"valid\", value });\n const isAborted = (x) => x.status === \"aborted\";\n const isDirty = (x) => x.status === \"dirty\";\n const isValid = (x) => x.status === \"valid\";\n const isAsync = (x) => typeof Promise !== \"undefined\" && x instanceof Promise;\n\n 
/******************************************************************************\r\n Copyright (c) Microsoft Corporation.\r\n\r\n Permission to use, copy, modify, and/or distribute this software for any\r\n purpose with or without fee is hereby granted.\r\n\r\n THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\n REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\n AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\n INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\n LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\n OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\n PERFORMANCE OF THIS SOFTWARE.\r\n ***************************************************************************** */\r\n\r\n function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (typeof state === \"function\" ? receiver !== state || true : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return state.get(receiver);\r\n }\r\n\r\n function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (typeof state === \"function\" ? receiver !== state || true : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (state.set(receiver, value)), value;\r\n }\r\n\r\n typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\r\n var e = new Error(message);\r\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\r\n };\n\n var errorUtil;\n (function (errorUtil) {\n errorUtil.errToObj = (message) => typeof message === \"string\" ? { message } : message || {};\n errorUtil.toString = (message) => typeof message === \"string\" ? message : message === null || message === void 0 ? void 0 : message.message;\n })(errorUtil || (errorUtil = {}));\n\n var _ZodEnum_cache, _ZodNativeEnum_cache;\n class ParseInputLazyPath {\n constructor(parent, value, path, key) {\n this._cachedPath = [];\n this.parent = parent;\n this.data = value;\n this._path = path;\n this._key = key;\n }\n get path() {\n if (!this._cachedPath.length) {\n if (this._key instanceof Array) {\n this._cachedPath.push(...this._path, ...this._key);\n }\n else {\n this._cachedPath.push(...this._path, this._key);\n }\n }\n return this._cachedPath;\n }\n }\n const handleResult = (ctx, result) => {\n if (isValid(result)) {\n return { success: true, data: result.value };\n }\n else {\n if (!ctx.common.issues.length) {\n throw new Error(\"Validation failed but no issues detected.\");\n }\n return {\n success: false,\n get error() {\n if (this._error)\n return this._error;\n const error = new ZodError(ctx.common.issues);\n this._error = error;\n return this._error;\n },\n };\n }\n };\n function processCreateParams(params) {\n if (!params)\n return {};\n const { errorMap, invalid_type_error, required_error, description } = params;\n if (errorMap && (invalid_type_error || required_error)) {\n throw new Error(`Can't use \"invalid_type_error\" or \"required_error\" in conjunction with custom error map.`);\n }\n if (errorMap)\n return { errorMap: errorMap, description };\n const customMap = (iss, ctx) => {\n var _a, _b;\n const { message } = params;\n if (iss.code === \"invalid_enum_value\") {\n return { message: message !== null && message !== void 0 ? 
message : ctx.defaultError };\n }\n if (typeof ctx.data === \"undefined\") {\n return { message: (_a = message !== null && message !== void 0 ? message : required_error) !== null && _a !== void 0 ? _a : ctx.defaultError };\n }\n if (iss.code !== \"invalid_type\")\n return { message: ctx.defaultError };\n return { message: (_b = message !== null && message !== void 0 ? message : invalid_type_error) !== null && _b !== void 0 ? _b : ctx.defaultError };\n };\n return { errorMap: customMap, description };\n }\n class ZodType {\n constructor(def) {\n /** Alias of safeParseAsync */\n this.spa = this.safeParseAsync;\n this._def = def;\n this.parse = this.parse.bind(this);\n this.safeParse = this.safeParse.bind(this);\n this.parseAsync = this.parseAsync.bind(this);\n this.safeParseAsync = this.safeParseAsync.bind(this);\n this.spa = this.spa.bind(this);\n this.refine = this.refine.bind(this);\n this.refinement = this.refinement.bind(this);\n this.superRefine = this.superRefine.bind(this);\n this.optional = this.optional.bind(this);\n this.nullable = this.nullable.bind(this);\n this.nullish = this.nullish.bind(this);\n this.array = this.array.bind(this);\n this.promise = this.promise.bind(this);\n this.or = this.or.bind(this);\n this.and = this.and.bind(this);\n this.transform = this.transform.bind(this);\n this.brand = this.brand.bind(this);\n this.default = this.default.bind(this);\n this.catch = this.catch.bind(this);\n this.describe = this.describe.bind(this);\n this.pipe = this.pipe.bind(this);\n this.readonly = this.readonly.bind(this);\n this.isNullable = this.isNullable.bind(this);\n this.isOptional = this.isOptional.bind(this);\n }\n get description() {\n return this._def.description;\n }\n _getType(input) {\n return getParsedType(input.data);\n }\n _getOrReturnCtx(input, ctx) {\n return (ctx || {\n common: input.parent.common,\n data: input.data,\n parsedType: getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n });\n }\n _processInputParams(input) {\n return {\n status: new ParseStatus(),\n ctx: {\n common: input.parent.common,\n data: input.data,\n parsedType: getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n },\n };\n }\n _parseSync(input) {\n const result = this._parse(input);\n if (isAsync(result)) {\n throw new Error(\"Synchronous parse encountered promise.\");\n }\n return result;\n }\n _parseAsync(input) {\n const result = this._parse(input);\n return Promise.resolve(result);\n }\n parse(data, params) {\n const result = this.safeParse(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n safeParse(data, params) {\n var _a;\n const ctx = {\n common: {\n issues: [],\n async: (_a = params === null || params === void 0 ? void 0 : params.async) !== null && _a !== void 0 ? _a : false,\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n },\n path: (params === null || params === void 0 ? 
void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: getParsedType(data),\n };\n const result = this._parseSync({ data, path: ctx.path, parent: ctx });\n return handleResult(ctx, result);\n }\n async parseAsync(data, params) {\n const result = await this.safeParseAsync(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n async safeParseAsync(data, params) {\n const ctx = {\n common: {\n issues: [],\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n async: true,\n },\n path: (params === null || params === void 0 ? void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: getParsedType(data),\n };\n const maybeAsyncResult = this._parse({ data, path: ctx.path, parent: ctx });\n const result = await (isAsync(maybeAsyncResult)\n ? maybeAsyncResult\n : Promise.resolve(maybeAsyncResult));\n return handleResult(ctx, result);\n }\n refine(check, message) {\n const getIssueProperties = (val) => {\n if (typeof message === \"string\" || typeof message === \"undefined\") {\n return { message };\n }\n else if (typeof message === \"function\") {\n return message(val);\n }\n else {\n return message;\n }\n };\n return this._refinement((val, ctx) => {\n const result = check(val);\n const setError = () => ctx.addIssue({\n code: ZodIssueCode.custom,\n ...getIssueProperties(val),\n });\n if (typeof Promise !== \"undefined\" && result instanceof Promise) {\n return result.then((data) => {\n if (!data) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n if (!result) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n refinement(check, refinementData) {\n return this._refinement((val, ctx) => {\n if (!check(val)) {\n ctx.addIssue(typeof refinementData === \"function\"\n ? refinementData(val, ctx)\n : refinementData);\n return false;\n }\n else {\n return true;\n }\n });\n }\n _refinement(refinement) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"refinement\", refinement },\n });\n }\n superRefine(refinement) {\n return this._refinement(refinement);\n }\n optional() {\n return ZodOptional.create(this, this._def);\n }\n nullable() {\n return ZodNullable.create(this, this._def);\n }\n nullish() {\n return this.nullable().optional();\n }\n array() {\n return ZodArray.create(this, this._def);\n }\n promise() {\n return ZodPromise.create(this, this._def);\n }\n or(option) {\n return ZodUnion.create([this, option], this._def);\n }\n and(incoming) {\n return ZodIntersection.create(this, incoming, this._def);\n }\n transform(transform) {\n return new ZodEffects({\n ...processCreateParams(this._def),\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"transform\", transform },\n });\n }\n default(def) {\n const defaultValueFunc = typeof def === \"function\" ? def : () => def;\n return new ZodDefault({\n ...processCreateParams(this._def),\n innerType: this,\n defaultValue: defaultValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodDefault,\n });\n }\n brand() {\n return new ZodBranded({\n typeName: ZodFirstPartyTypeKind.ZodBranded,\n type: this,\n ...processCreateParams(this._def),\n });\n }\n catch(def) {\n const catchValueFunc = typeof def === \"function\" ? 
def : () => def;\n return new ZodCatch({\n ...processCreateParams(this._def),\n innerType: this,\n catchValue: catchValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodCatch,\n });\n }\n describe(description) {\n const This = this.constructor;\n return new This({\n ...this._def,\n description,\n });\n }\n pipe(target) {\n return ZodPipeline.create(this, target);\n }\n readonly() {\n return ZodReadonly.create(this);\n }\n isOptional() {\n return this.safeParse(undefined).success;\n }\n isNullable() {\n return this.safeParse(null).success;\n }\n }\n const cuidRegex = /^c[^\\s-]{8,}$/i;\n const cuid2Regex = /^[0-9a-z]+$/;\n const ulidRegex = /^[0-9A-HJKMNP-TV-Z]{26}$/;\n // const uuidRegex =\n // /^([a-f0-9]{8}-[a-f0-9]{4}-[1-5][a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12}|00000000-0000-0000-0000-000000000000)$/i;\n const uuidRegex = /^[0-9a-fA-F]{8}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{12}$/i;\n const nanoidRegex = /^[a-z0-9_-]{21}$/i;\n const durationRegex = /^[-+]?P(?!$)(?:(?:[-+]?\\d+Y)|(?:[-+]?\\d+[.,]\\d+Y$))?(?:(?:[-+]?\\d+M)|(?:[-+]?\\d+[.,]\\d+M$))?(?:(?:[-+]?\\d+W)|(?:[-+]?\\d+[.,]\\d+W$))?(?:(?:[-+]?\\d+D)|(?:[-+]?\\d+[.,]\\d+D$))?(?:T(?=[\\d+-])(?:(?:[-+]?\\d+H)|(?:[-+]?\\d+[.,]\\d+H$))?(?:(?:[-+]?\\d+M)|(?:[-+]?\\d+[.,]\\d+M$))?(?:[-+]?\\d+(?:[.,]\\d+)?S)?)??$/;\n // from https://stackoverflow.com/a/46181/1550155\n // old version: too slow, didn't support unicode\n // const emailRegex = /^((([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+(\\.([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(\\\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))\\.)+(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))$/i;\n //old email regex\n // const emailRegex = /^(([^<>()[\\].,;:\\s@\"]+(\\.[^<>()[\\].,;:\\s@\"]+)*)|(\".+\"))@((?!-)([^<>()[\\].,;:\\s@\"]+\\.)+[^<>()[\\].,;:\\s@\"]{1,})[^-<>()[\\].,;:\\s@\"]$/i;\n // eslint-disable-next-line\n // const emailRegex =\n // /^(([^<>()[\\]\\\\.,;:\\s@\\\"]+(\\.[^<>()[\\]\\\\.,;:\\s@\\\"]+)*)|(\\\".+\\\"))@((\\[(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\\])|(\\[IPv6:(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))\\])|([A-Za-z0-9]([A-Za-z0-9-]*[A-Za-z0-9])*(\\.[A-Za-z]{2,})+))$/;\n // const emailRegex =\n // 
/^[a-zA-Z0-9\\.\\!\\#\\$\\%\\&\\'\\*\\+\\/\\=\\?\\^\\_\\`\\{\\|\\}\\~\\-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/;\n // const emailRegex =\n // /^(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])$/i;\n const emailRegex = /^(?!\\.)(?!.*\\.\\.)([A-Z0-9_'+\\-\\.]*)[A-Z0-9_+-]@([A-Z0-9][A-Z0-9\\-]*\\.)+[A-Z]{2,}$/i;\n // const emailRegex =\n // /^[a-z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-z0-9-]+(?:\\.[a-z0-9\\-]+)*$/i;\n // from https://thekevinscott.com/emojis-in-javascript/#writing-a-regular-expression\n const _emojiRegex = `^(\\\\p{Extended_Pictographic}|\\\\p{Emoji_Component})+$`;\n let emojiRegex;\n // faster, simpler, safer\n const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/;\n const ipv6Regex = /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/;\n // https://stackoverflow.com/questions/7860392/determine-if-string-is-in-base64-using-javascript\n const base64Regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/;\n // simple\n // const dateRegexSource = `\\\\d{4}-\\\\d{2}-\\\\d{2}`;\n // no leap year validation\n // const dateRegexSource = `\\\\d{4}-((0[13578]|10|12)-31|(0[13-9]|1[0-2])-30|(0[1-9]|1[0-2])-(0[1-9]|1\\\\d|2\\\\d))`;\n // with leap year validation\n const dateRegexSource = `((\\\\d\\\\d[2468][048]|\\\\d\\\\d[13579][26]|\\\\d\\\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\\\d{4}-((0[13578]|1[02])-(0[1-9]|[12]\\\\d|3[01])|(0[469]|11)-(0[1-9]|[12]\\\\d|30)|(02)-(0[1-9]|1\\\\d|2[0-8])))`;\n const dateRegex = new RegExp(`^${dateRegexSource}$`);\n function timeRegexSource(args) {\n // let regex = `\\\\d{2}:\\\\d{2}:\\\\d{2}`;\n let regex = `([01]\\\\d|2[0-3]):[0-5]\\\\d:[0-5]\\\\d`;\n if (args.precision) {\n regex = `${regex}\\\\.\\\\d{${args.precision}}`;\n }\n else if (args.precision == null) {\n regex = `${regex}(\\\\.\\\\d+)?`;\n }\n return regex;\n }\n function timeRegex(args) {\n return new RegExp(`^${timeRegexSource(args)}$`);\n }\n // Adapted from https://stackoverflow.com/a/3143231\n function datetimeRegex(args) {\n let regex = `${dateRegexSource}T${timeRegexSource(args)}`;\n const opts = [];\n opts.push(args.local ? 
`Z?` : `Z`);\n if (args.offset)\n opts.push(`([+-]\\\\d{2}:?\\\\d{2})`);\n regex = `${regex}(${opts.join(\"|\")})`;\n return new RegExp(`^${regex}$`);\n }\n function isValidIP(ip, version) {\n if ((version === \"v4\" || !version) && ipv4Regex.test(ip)) {\n return true;\n }\n if ((version === \"v6\" || !version) && ipv6Regex.test(ip)) {\n return true;\n }\n return false;\n }\n class ZodString extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = String(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.string) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.string,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const status = new ParseStatus();\n let ctx = undefined;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.length < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n exact: false,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n if (input.data.length > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n exact: false,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"length\") {\n const tooBig = input.data.length > check.value;\n const tooSmall = input.data.length < check.value;\n if (tooBig || tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n if (tooBig) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n exact: true,\n message: check.message,\n });\n }\n else if (tooSmall) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n exact: true,\n message: check.message,\n });\n }\n status.dirty();\n }\n }\n else if (check.kind === \"email\") {\n if (!emailRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"email\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"emoji\") {\n if (!emojiRegex) {\n emojiRegex = new RegExp(_emojiRegex, \"u\");\n }\n if (!emojiRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"emoji\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"uuid\") {\n if (!uuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"uuid\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"nanoid\") {\n if (!nanoidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"nanoid\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"cuid\") {\n if (!cuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"cuid\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === 
\"cuid2\") {\n if (!cuid2Regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"cuid2\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"ulid\") {\n if (!ulidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"ulid\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"url\") {\n try {\n new URL(input.data);\n }\n catch (_a) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"url\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"regex\") {\n check.regex.lastIndex = 0;\n const testResult = check.regex.test(input.data);\n if (!testResult) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"regex\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"trim\") {\n input.data = input.data.trim();\n }\n else if (check.kind === \"includes\") {\n if (!input.data.includes(check.value, check.position)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { includes: check.value, position: check.position },\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"toLowerCase\") {\n input.data = input.data.toLowerCase();\n }\n else if (check.kind === \"toUpperCase\") {\n input.data = input.data.toUpperCase();\n }\n else if (check.kind === \"startsWith\") {\n if (!input.data.startsWith(check.value)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { startsWith: check.value },\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"endsWith\") {\n if (!input.data.endsWith(check.value)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: { endsWith: check.value },\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"datetime\") {\n const regex = datetimeRegex(check);\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"datetime\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"date\") {\n const regex = dateRegex;\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"date\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"time\") {\n const regex = timeRegex(check);\n if (!regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_string,\n validation: \"time\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"duration\") {\n if (!durationRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"duration\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"ip\") {\n if (!isValidIP(input.data, check.version)) {\n ctx = 
this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"ip\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"base64\") {\n if (!base64Regex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n validation: \"base64\",\n code: ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n _regex(regex, validation, message) {\n return this.refinement((data) => regex.test(data), {\n validation,\n code: ZodIssueCode.invalid_string,\n ...errorUtil.errToObj(message),\n });\n }\n _addCheck(check) {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n email(message) {\n return this._addCheck({ kind: \"email\", ...errorUtil.errToObj(message) });\n }\n url(message) {\n return this._addCheck({ kind: \"url\", ...errorUtil.errToObj(message) });\n }\n emoji(message) {\n return this._addCheck({ kind: \"emoji\", ...errorUtil.errToObj(message) });\n }\n uuid(message) {\n return this._addCheck({ kind: \"uuid\", ...errorUtil.errToObj(message) });\n }\n nanoid(message) {\n return this._addCheck({ kind: \"nanoid\", ...errorUtil.errToObj(message) });\n }\n cuid(message) {\n return this._addCheck({ kind: \"cuid\", ...errorUtil.errToObj(message) });\n }\n cuid2(message) {\n return this._addCheck({ kind: \"cuid2\", ...errorUtil.errToObj(message) });\n }\n ulid(message) {\n return this._addCheck({ kind: \"ulid\", ...errorUtil.errToObj(message) });\n }\n base64(message) {\n return this._addCheck({ kind: \"base64\", ...errorUtil.errToObj(message) });\n }\n ip(options) {\n return this._addCheck({ kind: \"ip\", ...errorUtil.errToObj(options) });\n }\n datetime(options) {\n var _a, _b;\n if (typeof options === \"string\") {\n return this._addCheck({\n kind: \"datetime\",\n precision: null,\n offset: false,\n local: false,\n message: options,\n });\n }\n return this._addCheck({\n kind: \"datetime\",\n precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === \"undefined\" ? null : options === null || options === void 0 ? void 0 : options.precision,\n offset: (_a = options === null || options === void 0 ? void 0 : options.offset) !== null && _a !== void 0 ? _a : false,\n local: (_b = options === null || options === void 0 ? void 0 : options.local) !== null && _b !== void 0 ? _b : false,\n ...errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message),\n });\n }\n date(message) {\n return this._addCheck({ kind: \"date\", message });\n }\n time(options) {\n if (typeof options === \"string\") {\n return this._addCheck({\n kind: \"time\",\n precision: null,\n message: options,\n });\n }\n return this._addCheck({\n kind: \"time\",\n precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === \"undefined\" ? null : options === null || options === void 0 ? void 0 : options.precision,\n ...errorUtil.errToObj(options === null || options === void 0 ? 
void 0 : options.message),\n });\n }\n duration(message) {\n return this._addCheck({ kind: \"duration\", ...errorUtil.errToObj(message) });\n }\n regex(regex, message) {\n return this._addCheck({\n kind: \"regex\",\n regex: regex,\n ...errorUtil.errToObj(message),\n });\n }\n includes(value, options) {\n return this._addCheck({\n kind: \"includes\",\n value: value,\n position: options === null || options === void 0 ? void 0 : options.position,\n ...errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message),\n });\n }\n startsWith(value, message) {\n return this._addCheck({\n kind: \"startsWith\",\n value: value,\n ...errorUtil.errToObj(message),\n });\n }\n endsWith(value, message) {\n return this._addCheck({\n kind: \"endsWith\",\n value: value,\n ...errorUtil.errToObj(message),\n });\n }\n min(minLength, message) {\n return this._addCheck({\n kind: \"min\",\n value: minLength,\n ...errorUtil.errToObj(message),\n });\n }\n max(maxLength, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxLength,\n ...errorUtil.errToObj(message),\n });\n }\n length(len, message) {\n return this._addCheck({\n kind: \"length\",\n value: len,\n ...errorUtil.errToObj(message),\n });\n }\n /**\n * @deprecated Use z.string().min(1) instead.\n * @see {@link ZodString.min}\n */\n nonempty(message) {\n return this.min(1, errorUtil.errToObj(message));\n }\n trim() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"trim\" }],\n });\n }\n toLowerCase() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"toLowerCase\" }],\n });\n }\n toUpperCase() {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"toUpperCase\" }],\n });\n }\n get isDatetime() {\n return !!this._def.checks.find((ch) => ch.kind === \"datetime\");\n }\n get isDate() {\n return !!this._def.checks.find((ch) => ch.kind === \"date\");\n }\n get isTime() {\n return !!this._def.checks.find((ch) => ch.kind === \"time\");\n }\n get isDuration() {\n return !!this._def.checks.find((ch) => ch.kind === \"duration\");\n }\n get isEmail() {\n return !!this._def.checks.find((ch) => ch.kind === \"email\");\n }\n get isURL() {\n return !!this._def.checks.find((ch) => ch.kind === \"url\");\n }\n get isEmoji() {\n return !!this._def.checks.find((ch) => ch.kind === \"emoji\");\n }\n get isUUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"uuid\");\n }\n get isNANOID() {\n return !!this._def.checks.find((ch) => ch.kind === \"nanoid\");\n }\n get isCUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid\");\n }\n get isCUID2() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid2\");\n }\n get isULID() {\n return !!this._def.checks.find((ch) => ch.kind === \"ulid\");\n }\n get isIP() {\n return !!this._def.checks.find((ch) => ch.kind === \"ip\");\n }\n get isBase64() {\n return !!this._def.checks.find((ch) => ch.kind === \"base64\");\n }\n get minLength() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxLength() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n }\n ZodString.create = (params) => {\n var _a;\n return new ZodString({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodString,\n coerce: (_a = params === null || params === void 0 ? 
void 0 : params.coerce) !== null && _a !== void 0 ? _a : false,\n ...processCreateParams(params),\n });\n };\n // https://stackoverflow.com/questions/3966484/why-does-modulus-operator-return-fractional-number-in-javascript/31711034#31711034\n function floatSafeRemainder(val, step) {\n const valDecCount = (val.toString().split(\".\")[1] || \"\").length;\n const stepDecCount = (step.toString().split(\".\")[1] || \"\").length;\n const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount;\n const valInt = parseInt(val.toFixed(decCount).replace(\".\", \"\"));\n const stepInt = parseInt(step.toFixed(decCount).replace(\".\", \"\"));\n return (valInt % stepInt) / Math.pow(10, decCount);\n }\n class ZodNumber extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n this.step = this.multipleOf;\n }\n _parse(input) {\n if (this._def.coerce) {\n input.data = Number(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.number) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.number,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n let ctx = undefined;\n const status = new ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"int\") {\n if (!util.isInteger(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: \"integer\",\n received: \"float\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"min\") {\n const tooSmall = check.inclusive\n ? input.data < check.value\n : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n exact: false,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n const tooBig = check.inclusive\n ? 
input.data > check.value\n : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n exact: false,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"multipleOf\") {\n if (floatSafeRemainder(input.data, check.value) !== 0) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"finite\") {\n if (!Number.isFinite(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_finite,\n message: check.message,\n });\n status.dirty();\n }\n }\n else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodNumber({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil.toString(message),\n },\n ],\n });\n }\n _addCheck(check) {\n return new ZodNumber({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n int(message) {\n return this._addCheck({\n kind: \"int\",\n message: errorUtil.toString(message),\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: false,\n message: errorUtil.toString(message),\n });\n }\n negative(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: false,\n message: errorUtil.toString(message),\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: true,\n message: errorUtil.toString(message),\n });\n }\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: true,\n message: errorUtil.toString(message),\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value: value,\n message: errorUtil.toString(message),\n });\n }\n finite(message) {\n return this._addCheck({\n kind: \"finite\",\n message: errorUtil.toString(message),\n });\n }\n safe(message) {\n return this._addCheck({\n kind: \"min\",\n inclusive: true,\n value: Number.MIN_SAFE_INTEGER,\n message: errorUtil.toString(message),\n })._addCheck({\n kind: \"max\",\n inclusive: true,\n value: Number.MAX_SAFE_INTEGER,\n message: errorUtil.toString(message),\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n get isInt() {\n return !!this._def.checks.find((ch) => ch.kind === \"int\" ||\n (ch.kind === \"multipleOf\" && util.isInteger(ch.value)));\n }\n get isFinite() {\n let max = null, min = null;\n for (const ch of this._def.checks) 
{\n if (ch.kind === \"finite\" ||\n ch.kind === \"int\" ||\n ch.kind === \"multipleOf\") {\n return true;\n }\n else if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n else if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return Number.isFinite(min) && Number.isFinite(max);\n }\n }\n ZodNumber.create = (params) => {\n return new ZodNumber({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodNumber,\n coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false,\n ...processCreateParams(params),\n });\n };\n class ZodBigInt extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n }\n _parse(input) {\n if (this._def.coerce) {\n input.data = BigInt(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.bigint) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.bigint,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n let ctx = undefined;\n const status = new ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n const tooSmall = check.inclusive\n ? input.data < check.value\n : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n type: \"bigint\",\n minimum: check.value,\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n const tooBig = check.inclusive\n ? input.data > check.value\n : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n type: \"bigint\",\n maximum: check.value,\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"multipleOf\") {\n if (input.data % check.value !== BigInt(0)) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message,\n });\n status.dirty();\n }\n }\n else {\n util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodBigInt({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil.toString(message),\n },\n ],\n });\n }\n _addCheck(check) {\n return new ZodBigInt({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: BigInt(0),\n inclusive: false,\n message: errorUtil.toString(message),\n });\n }\n negative(message) {\n return this._addCheck({\n kind: \"max\",\n value: BigInt(0),\n inclusive: false,\n message: errorUtil.toString(message),\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: BigInt(0),\n inclusive: true,\n message: errorUtil.toString(message),\n });\n 
}\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: BigInt(0),\n inclusive: true,\n message: errorUtil.toString(message),\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value,\n message: errorUtil.toString(message),\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n }\n ZodBigInt.create = (params) => {\n var _a;\n return new ZodBigInt({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodBigInt,\n coerce: (_a = params === null || params === void 0 ? void 0 : params.coerce) !== null && _a !== void 0 ? _a : false,\n ...processCreateParams(params),\n });\n };\n class ZodBoolean extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = Boolean(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.boolean) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.boolean,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodBoolean.create = (params) => {\n return new ZodBoolean({\n typeName: ZodFirstPartyTypeKind.ZodBoolean,\n coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false,\n ...processCreateParams(params),\n });\n };\n class ZodDate extends ZodType {\n _parse(input) {\n if (this._def.coerce) {\n input.data = new Date(input.data);\n }\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.date) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.date,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n if (isNaN(input.data.getTime())) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_date,\n });\n return INVALID;\n }\n const status = new ParseStatus();\n let ctx = undefined;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.getTime() < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n message: check.message,\n inclusive: true,\n exact: false,\n minimum: check.value,\n type: \"date\",\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n if (input.data.getTime() > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n message: check.message,\n inclusive: true,\n exact: false,\n maximum: check.value,\n type: \"date\",\n });\n status.dirty();\n }\n }\n else {\n util.assertNever(check);\n }\n }\n return {\n status: status.value,\n value: new Date(input.data.getTime()),\n };\n }\n _addCheck(check) {\n return new ZodDate({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n min(minDate, message) {\n return this._addCheck({\n kind: \"min\",\n value: minDate.getTime(),\n message: errorUtil.toString(message),\n });\n }\n max(maxDate, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxDate.getTime(),\n message: errorUtil.toString(message),\n });\n }\n get minDate() {\n let min = null;\n for (const ch of this._def.checks) {\n if 
(ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min != null ? new Date(min) : null;\n }\n get maxDate() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max != null ? new Date(max) : null;\n }\n }\n ZodDate.create = (params) => {\n return new ZodDate({\n checks: [],\n coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false,\n typeName: ZodFirstPartyTypeKind.ZodDate,\n ...processCreateParams(params),\n });\n };\n class ZodSymbol extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.symbol) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.symbol,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodSymbol.create = (params) => {\n return new ZodSymbol({\n typeName: ZodFirstPartyTypeKind.ZodSymbol,\n ...processCreateParams(params),\n });\n };\n class ZodUndefined extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.undefined,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodUndefined.create = (params) => {\n return new ZodUndefined({\n typeName: ZodFirstPartyTypeKind.ZodUndefined,\n ...processCreateParams(params),\n });\n };\n class ZodNull extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.null) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.null,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodNull.create = (params) => {\n return new ZodNull({\n typeName: ZodFirstPartyTypeKind.ZodNull,\n ...processCreateParams(params),\n });\n };\n class ZodAny extends ZodType {\n constructor() {\n super(...arguments);\n // to prevent instances of other classes from extending ZodAny. 
this causes issues with catchall in ZodObject.\n this._any = true;\n }\n _parse(input) {\n return OK(input.data);\n }\n }\n ZodAny.create = (params) => {\n return new ZodAny({\n typeName: ZodFirstPartyTypeKind.ZodAny,\n ...processCreateParams(params),\n });\n };\n class ZodUnknown extends ZodType {\n constructor() {\n super(...arguments);\n // required\n this._unknown = true;\n }\n _parse(input) {\n return OK(input.data);\n }\n }\n ZodUnknown.create = (params) => {\n return new ZodUnknown({\n typeName: ZodFirstPartyTypeKind.ZodUnknown,\n ...processCreateParams(params),\n });\n };\n class ZodNever extends ZodType {\n _parse(input) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.never,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n }\n ZodNever.create = (params) => {\n return new ZodNever({\n typeName: ZodFirstPartyTypeKind.ZodNever,\n ...processCreateParams(params),\n });\n };\n class ZodVoid extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.void,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n }\n ZodVoid.create = (params) => {\n return new ZodVoid({\n typeName: ZodFirstPartyTypeKind.ZodVoid,\n ...processCreateParams(params),\n });\n };\n class ZodArray extends ZodType {\n _parse(input) {\n const { ctx, status } = this._processInputParams(input);\n const def = this._def;\n if (ctx.parsedType !== ZodParsedType.array) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.array,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n if (def.exactLength !== null) {\n const tooBig = ctx.data.length > def.exactLength.value;\n const tooSmall = ctx.data.length < def.exactLength.value;\n if (tooBig || tooSmall) {\n addIssueToContext(ctx, {\n code: tooBig ? ZodIssueCode.too_big : ZodIssueCode.too_small,\n minimum: (tooSmall ? def.exactLength.value : undefined),\n maximum: (tooBig ? 
def.exactLength.value : undefined),\n type: \"array\",\n inclusive: true,\n exact: true,\n message: def.exactLength.message,\n });\n status.dirty();\n }\n }\n if (def.minLength !== null) {\n if (ctx.data.length < def.minLength.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: def.minLength.value,\n type: \"array\",\n inclusive: true,\n exact: false,\n message: def.minLength.message,\n });\n status.dirty();\n }\n }\n if (def.maxLength !== null) {\n if (ctx.data.length > def.maxLength.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: def.maxLength.value,\n type: \"array\",\n inclusive: true,\n exact: false,\n message: def.maxLength.message,\n });\n status.dirty();\n }\n }\n if (ctx.common.async) {\n return Promise.all([...ctx.data].map((item, i) => {\n return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n })).then((result) => {\n return ParseStatus.mergeArray(status, result);\n });\n }\n const result = [...ctx.data].map((item, i) => {\n return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n });\n return ParseStatus.mergeArray(status, result);\n }\n get element() {\n return this._def.type;\n }\n min(minLength, message) {\n return new ZodArray({\n ...this._def,\n minLength: { value: minLength, message: errorUtil.toString(message) },\n });\n }\n max(maxLength, message) {\n return new ZodArray({\n ...this._def,\n maxLength: { value: maxLength, message: errorUtil.toString(message) },\n });\n }\n length(len, message) {\n return new ZodArray({\n ...this._def,\n exactLength: { value: len, message: errorUtil.toString(message) },\n });\n }\n nonempty(message) {\n return this.min(1, message);\n }\n }\n ZodArray.create = (schema, params) => {\n return new ZodArray({\n type: schema,\n minLength: null,\n maxLength: null,\n exactLength: null,\n typeName: ZodFirstPartyTypeKind.ZodArray,\n ...processCreateParams(params),\n });\n };\n function deepPartialify(schema) {\n if (schema instanceof ZodObject) {\n const newShape = {};\n for (const key in schema.shape) {\n const fieldSchema = schema.shape[key];\n newShape[key] = ZodOptional.create(deepPartialify(fieldSchema));\n }\n return new ZodObject({\n ...schema._def,\n shape: () => newShape,\n });\n }\n else if (schema instanceof ZodArray) {\n return new ZodArray({\n ...schema._def,\n type: deepPartialify(schema.element),\n });\n }\n else if (schema instanceof ZodOptional) {\n return ZodOptional.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodNullable) {\n return ZodNullable.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodTuple) {\n return ZodTuple.create(schema.items.map((item) => deepPartialify(item)));\n }\n else {\n return schema;\n }\n }\n class ZodObject extends ZodType {\n constructor() {\n super(...arguments);\n this._cached = null;\n /**\n * @deprecated In most cases, this is no longer needed - unknown properties are now silently stripped.\n * If you want to pass through unknown properties, use `.passthrough()` instead.\n */\n this.nonstrict = this.passthrough;\n // extend<\n // Augmentation extends ZodRawShape,\n // NewOutput extends util.flatten<{\n // [k in keyof Augmentation | keyof Output]: k extends keyof Augmentation\n // ? Augmentation[k][\"_output\"]\n // : k extends keyof Output\n // ? Output[k]\n // : never;\n // }>,\n // NewInput extends util.flatten<{\n // [k in keyof Augmentation | keyof Input]: k extends keyof Augmentation\n // ? 
Augmentation[k][\"_input\"]\n // : k extends keyof Input\n // ? Input[k]\n // : never;\n // }>\n // >(\n // augmentation: Augmentation\n // ): ZodObject<\n // extendShape<T, Augmentation>,\n // UnknownKeys,\n // Catchall,\n // NewOutput,\n // NewInput\n // > {\n // return new ZodObject({\n // ...this._def,\n // shape: () => ({\n // ...this._def.shape(),\n // ...augmentation,\n // }),\n // }) as any;\n // }\n /**\n * @deprecated Use `.extend` instead\n * */\n this.augment = this.extend;\n }\n _getCached() {\n if (this._cached !== null)\n return this._cached;\n const shape = this._def.shape();\n const keys = util.objectKeys(shape);\n return (this._cached = { shape, keys });\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.object) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const { status, ctx } = this._processInputParams(input);\n const { shape, keys: shapeKeys } = this._getCached();\n const extraKeys = [];\n if (!(this._def.catchall instanceof ZodNever &&\n this._def.unknownKeys === \"strip\")) {\n for (const key in ctx.data) {\n if (!shapeKeys.includes(key)) {\n extraKeys.push(key);\n }\n }\n }\n const pairs = [];\n for (const key of shapeKeys) {\n const keyValidator = shape[key];\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)),\n alwaysSet: key in ctx.data,\n });\n }\n if (this._def.catchall instanceof ZodNever) {\n const unknownKeys = this._def.unknownKeys;\n if (unknownKeys === \"passthrough\") {\n for (const key of extraKeys) {\n pairs.push({\n key: { status: \"valid\", value: key },\n value: { status: \"valid\", value: ctx.data[key] },\n });\n }\n }\n else if (unknownKeys === \"strict\") {\n if (extraKeys.length > 0) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.unrecognized_keys,\n keys: extraKeys,\n });\n status.dirty();\n }\n }\n else if (unknownKeys === \"strip\") ;\n else {\n throw new Error(`Internal ZodObject error: invalid unknownKeys value.`);\n }\n }\n else {\n // run catchall validation\n const catchall = this._def.catchall;\n for (const key of extraKeys) {\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: catchall._parse(new ParseInputLazyPath(ctx, value, ctx.path, key) //, ctx.child(key), value, getParsedType(value)\n ),\n alwaysSet: key in ctx.data,\n });\n }\n }\n if (ctx.common.async) {\n return Promise.resolve()\n .then(async () => {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n syncPairs.push({\n key,\n value,\n alwaysSet: pair.alwaysSet,\n });\n }\n return syncPairs;\n })\n .then((syncPairs) => {\n return ParseStatus.mergeObjectSync(status, syncPairs);\n });\n }\n else {\n return ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get shape() {\n return this._def.shape();\n }\n strict(message) {\n errorUtil.errToObj;\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strict\",\n ...(message !== undefined\n ? {\n errorMap: (issue, ctx) => {\n var _a, _b, _c, _d;\n const defaultError = (_c = (_b = (_a = this._def).errorMap) === null || _b === void 0 ? void 0 : _b.call(_a, issue, ctx).message) !== null && _c !== void 0 ? 
_c : ctx.defaultError;\n if (issue.code === \"unrecognized_keys\")\n return {\n message: (_d = errorUtil.errToObj(message).message) !== null && _d !== void 0 ? _d : defaultError,\n };\n return {\n message: defaultError,\n };\n },\n }\n : {}),\n });\n }\n strip() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strip\",\n });\n }\n passthrough() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"passthrough\",\n });\n }\n // const AugmentFactory =\n // <Def extends ZodObjectDef>(def: Def) =>\n // <Augmentation extends ZodRawShape>(\n // augmentation: Augmentation\n // ): ZodObject<\n // extendShape<ReturnType<Def[\"shape\"]>, Augmentation>,\n // Def[\"unknownKeys\"],\n // Def[\"catchall\"]\n // > => {\n // return new ZodObject({\n // ...def,\n // shape: () => ({\n // ...def.shape(),\n // ...augmentation,\n // }),\n // }) as any;\n // };\n extend(augmentation) {\n return new ZodObject({\n ...this._def,\n shape: () => ({\n ...this._def.shape(),\n ...augmentation,\n }),\n });\n }\n /**\n * Prior to zod@1.0.12 there was a bug in the\n * inferred type of merged objects. Please\n * upgrade if you are experiencing issues.\n */\n merge(merging) {\n const merged = new ZodObject({\n unknownKeys: merging._def.unknownKeys,\n catchall: merging._def.catchall,\n shape: () => ({\n ...this._def.shape(),\n ...merging._def.shape(),\n }),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n });\n return merged;\n }\n // merge<\n // Incoming extends AnyZodObject,\n // Augmentation extends Incoming[\"shape\"],\n // NewOutput extends {\n // [k in keyof Augmentation | keyof Output]: k extends keyof Augmentation\n // ? Augmentation[k][\"_output\"]\n // : k extends keyof Output\n // ? Output[k]\n // : never;\n // },\n // NewInput extends {\n // [k in keyof Augmentation | keyof Input]: k extends keyof Augmentation\n // ? Augmentation[k][\"_input\"]\n // : k extends keyof Input\n // ? 
Input[k]\n // : never;\n // }\n // >(\n // merging: Incoming\n // ): ZodObject<\n // extendShape<T, ReturnType<Incoming[\"_def\"][\"shape\"]>>,\n // Incoming[\"_def\"][\"unknownKeys\"],\n // Incoming[\"_def\"][\"catchall\"],\n // NewOutput,\n // NewInput\n // > {\n // const merged: any = new ZodObject({\n // unknownKeys: merging._def.unknownKeys,\n // catchall: merging._def.catchall,\n // shape: () =>\n // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n // typeName: ZodFirstPartyTypeKind.ZodObject,\n // }) as any;\n // return merged;\n // }\n setKey(key, schema) {\n return this.augment({ [key]: schema });\n }\n // merge<Incoming extends AnyZodObject>(\n // merging: Incoming\n // ): //ZodObject<T & Incoming[\"_shape\"], UnknownKeys, Catchall> = (merging) => {\n // ZodObject<\n // extendShape<T, ReturnType<Incoming[\"_def\"][\"shape\"]>>,\n // Incoming[\"_def\"][\"unknownKeys\"],\n // Incoming[\"_def\"][\"catchall\"]\n // > {\n // // const mergedShape = objectUtil.mergeShapes(\n // // this._def.shape(),\n // // merging._def.shape()\n // // );\n // const merged: any = new ZodObject({\n // unknownKeys: merging._def.unknownKeys,\n // catchall: merging._def.catchall,\n // shape: () =>\n // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n // typeName: ZodFirstPartyTypeKind.ZodObject,\n // }) as any;\n // return merged;\n // }\n catchall(index) {\n return new ZodObject({\n ...this._def,\n catchall: index,\n });\n }\n pick(mask) {\n const shape = {};\n util.objectKeys(mask).forEach((key) => {\n if (mask[key] && this.shape[key]) {\n shape[key] = this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n omit(mask) {\n const shape = {};\n util.objectKeys(this.shape).forEach((key) => {\n if (!mask[key]) {\n shape[key] = this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n /**\n * @deprecated\n */\n deepPartial() {\n return deepPartialify(this);\n }\n partial(mask) {\n const newShape = {};\n util.objectKeys(this.shape).forEach((key) => {\n const fieldSchema = this.shape[key];\n if (mask && !mask[key]) {\n newShape[key] = fieldSchema;\n }\n else {\n newShape[key] = fieldSchema.optional();\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n required(mask) {\n const newShape = {};\n util.objectKeys(this.shape).forEach((key) => {\n if (mask && !mask[key]) {\n newShape[key] = this.shape[key];\n }\n else {\n const fieldSchema = this.shape[key];\n let newField = fieldSchema;\n while (newField instanceof ZodOptional) {\n newField = newField._def.innerType;\n }\n newShape[key] = newField;\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n keyof() {\n return createZodEnum(util.objectKeys(this.shape));\n }\n }\n ZodObject.create = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n };\n ZodObject.strictCreate = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strict\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n };\n ZodObject.lazycreate = (shape, params) => {\n return new ZodObject({\n shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n };\n class 
ZodUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const options = this._def.options;\n function handleResults(results) {\n // return first issue-free validation if it exists\n for (const result of results) {\n if (result.result.status === \"valid\") {\n return result.result;\n }\n }\n for (const result of results) {\n if (result.result.status === \"dirty\") {\n // add issues from dirty option\n ctx.common.issues.push(...result.ctx.common.issues);\n return result.result;\n }\n }\n // return invalid\n const unionErrors = results.map((result) => new ZodError(result.ctx.common.issues));\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union,\n unionErrors,\n });\n return INVALID;\n }\n if (ctx.common.async) {\n return Promise.all(options.map(async (option) => {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n return {\n result: await option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n }),\n ctx: childCtx,\n };\n })).then(handleResults);\n }\n else {\n let dirty = undefined;\n const issues = [];\n for (const option of options) {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n const result = option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n });\n if (result.status === \"valid\") {\n return result;\n }\n else if (result.status === \"dirty\" && !dirty) {\n dirty = { result, ctx: childCtx };\n }\n if (childCtx.common.issues.length) {\n issues.push(childCtx.common.issues);\n }\n }\n if (dirty) {\n ctx.common.issues.push(...dirty.ctx.common.issues);\n return dirty.result;\n }\n const unionErrors = issues.map((issues) => new ZodError(issues));\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union,\n unionErrors,\n });\n return INVALID;\n }\n }\n get options() {\n return this._def.options;\n }\n }\n ZodUnion.create = (types, params) => {\n return new ZodUnion({\n options: types,\n typeName: ZodFirstPartyTypeKind.ZodUnion,\n ...processCreateParams(params),\n });\n };\n /////////////////////////////////////////////////////\n /////////////////////////////////////////////////////\n ////////// //////////\n ////////// ZodDiscriminatedUnion //////////\n ////////// //////////\n /////////////////////////////////////////////////////\n /////////////////////////////////////////////////////\n const getDiscriminator = (type) => {\n if (type instanceof ZodLazy) {\n return getDiscriminator(type.schema);\n }\n else if (type instanceof ZodEffects) {\n return getDiscriminator(type.innerType());\n }\n else if (type instanceof ZodLiteral) {\n return [type.value];\n }\n else if (type instanceof ZodEnum) {\n return type.options;\n }\n else if (type instanceof ZodNativeEnum) {\n // eslint-disable-next-line ban/ban\n return util.objectValues(type.enum);\n }\n else if (type instanceof ZodDefault) {\n return getDiscriminator(type._def.innerType);\n }\n else if (type instanceof ZodUndefined) {\n return [undefined];\n }\n else if (type instanceof ZodNull) {\n return [null];\n }\n else if (type instanceof ZodOptional) {\n return [undefined, ...getDiscriminator(type.unwrap())];\n }\n else if (type instanceof ZodNullable) {\n return [null, ...getDiscriminator(type.unwrap())];\n }\n else if (type instanceof ZodBranded) {\n return getDiscriminator(type.unwrap());\n }\n else if (type instanceof ZodReadonly) {\n return getDiscriminator(type.unwrap());\n }\n else if (type instanceof ZodCatch) {\n return 
getDiscriminator(type._def.innerType);\n }\n else {\n return [];\n }\n };\n class ZodDiscriminatedUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.object) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const discriminator = this.discriminator;\n const discriminatorValue = ctx.data[discriminator];\n const option = this.optionsMap.get(discriminatorValue);\n if (!option) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_union_discriminator,\n options: Array.from(this.optionsMap.keys()),\n path: [discriminator],\n });\n return INVALID;\n }\n if (ctx.common.async) {\n return option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n else {\n return option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n get discriminator() {\n return this._def.discriminator;\n }\n get options() {\n return this._def.options;\n }\n get optionsMap() {\n return this._def.optionsMap;\n }\n /**\n * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor.\n * However, it only allows a union of objects, all of which need to share a discriminator property. This property must\n * have a different value for each object in the union.\n * @param discriminator the name of the discriminator property\n * @param types an array of object schemas\n * @param params\n */\n static create(discriminator, options, params) {\n // Get all the valid discriminator values\n const optionsMap = new Map();\n // try {\n for (const type of options) {\n const discriminatorValues = getDiscriminator(type.shape[discriminator]);\n if (!discriminatorValues.length) {\n throw new Error(`A discriminator value for key \\`${discriminator}\\` could not be extracted from all schema options`);\n }\n for (const value of discriminatorValues) {\n if (optionsMap.has(value)) {\n throw new Error(`Discriminator property ${String(discriminator)} has duplicate value ${String(value)}`);\n }\n optionsMap.set(value, type);\n }\n }\n return new ZodDiscriminatedUnion({\n typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion,\n discriminator,\n options,\n optionsMap,\n ...processCreateParams(params),\n });\n }\n }\n function mergeValues(a, b) {\n const aType = getParsedType(a);\n const bType = getParsedType(b);\n if (a === b) {\n return { valid: true, data: a };\n }\n else if (aType === ZodParsedType.object && bType === ZodParsedType.object) {\n const bKeys = util.objectKeys(b);\n const sharedKeys = util\n .objectKeys(a)\n .filter((key) => bKeys.indexOf(key) !== -1);\n const newObj = { ...a, ...b };\n for (const key of sharedKeys) {\n const sharedValue = mergeValues(a[key], b[key]);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newObj[key] = sharedValue.data;\n }\n return { valid: true, data: newObj };\n }\n else if (aType === ZodParsedType.array && bType === ZodParsedType.array) {\n if (a.length !== b.length) {\n return { valid: false };\n }\n const newArray = [];\n for (let index = 0; index < a.length; index++) {\n const itemA = a[index];\n const itemB = b[index];\n const sharedValue = mergeValues(itemA, itemB);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newArray.push(sharedValue.data);\n }\n return { valid: true, data: newArray };\n }\n else if (aType === ZodParsedType.date &&\n bType === ZodParsedType.date &&\n +a === 
+b) {\n return { valid: true, data: a };\n }\n else {\n return { valid: false };\n }\n }\n class ZodIntersection extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const handleParsed = (parsedLeft, parsedRight) => {\n if (isAborted(parsedLeft) || isAborted(parsedRight)) {\n return INVALID;\n }\n const merged = mergeValues(parsedLeft.value, parsedRight.value);\n if (!merged.valid) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_intersection_types,\n });\n return INVALID;\n }\n if (isDirty(parsedLeft) || isDirty(parsedRight)) {\n status.dirty();\n }\n return { status: status.value, value: merged.data };\n };\n if (ctx.common.async) {\n return Promise.all([\n this._def.left._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n this._def.right._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n ]).then(([left, right]) => handleParsed(left, right));\n }\n else {\n return handleParsed(this._def.left._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }), this._def.right._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }));\n }\n }\n }\n ZodIntersection.create = (left, right, params) => {\n return new ZodIntersection({\n left: left,\n right: right,\n typeName: ZodFirstPartyTypeKind.ZodIntersection,\n ...processCreateParams(params),\n });\n };\n class ZodTuple extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.array) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.array,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n if (ctx.data.length < this._def.items.length) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: this._def.items.length,\n inclusive: true,\n exact: false,\n type: \"array\",\n });\n return INVALID;\n }\n const rest = this._def.rest;\n if (!rest && ctx.data.length > this._def.items.length) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: this._def.items.length,\n inclusive: true,\n exact: false,\n type: \"array\",\n });\n status.dirty();\n }\n const items = [...ctx.data]\n .map((item, itemIndex) => {\n const schema = this._def.items[itemIndex] || this._def.rest;\n if (!schema)\n return null;\n return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex));\n })\n .filter((x) => !!x); // filter nulls\n if (ctx.common.async) {\n return Promise.all(items).then((results) => {\n return ParseStatus.mergeArray(status, results);\n });\n }\n else {\n return ParseStatus.mergeArray(status, items);\n }\n }\n get items() {\n return this._def.items;\n }\n rest(rest) {\n return new ZodTuple({\n ...this._def,\n rest,\n });\n }\n }\n ZodTuple.create = (schemas, params) => {\n if (!Array.isArray(schemas)) {\n throw new Error(\"You must pass an array of schemas to z.tuple([ ... 
])\");\n }\n return new ZodTuple({\n items: schemas,\n typeName: ZodFirstPartyTypeKind.ZodTuple,\n rest: null,\n ...processCreateParams(params),\n });\n };\n class ZodRecord extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.object) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.object,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const pairs = [];\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n for (const key in ctx.data) {\n pairs.push({\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)),\n value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)),\n alwaysSet: key in ctx.data,\n });\n }\n if (ctx.common.async) {\n return ParseStatus.mergeObjectAsync(status, pairs);\n }\n else {\n return ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get element() {\n return this._def.valueType;\n }\n static create(first, second, third) {\n if (second instanceof ZodType) {\n return new ZodRecord({\n keyType: first,\n valueType: second,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(third),\n });\n }\n return new ZodRecord({\n keyType: ZodString.create(),\n valueType: first,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(second),\n });\n }\n }\n class ZodMap extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.map) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.map,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n const pairs = [...ctx.data.entries()].map(([key, value], index) => {\n return {\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, \"key\"])),\n value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, \"value\"])),\n };\n });\n if (ctx.common.async) {\n const finalMap = new Map();\n return Promise.resolve().then(async () => {\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n });\n }\n else {\n const finalMap = new Map();\n for (const pair of pairs) {\n const key = pair.key;\n const value = pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n }\n }\n }\n ZodMap.create = (keyType, valueType, params) => {\n return new ZodMap({\n valueType,\n keyType,\n typeName: ZodFirstPartyTypeKind.ZodMap,\n ...processCreateParams(params),\n });\n };\n class ZodSet extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.set) {\n addIssueToContext(ctx, {\n code: 
ZodIssueCode.invalid_type,\n expected: ZodParsedType.set,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const def = this._def;\n if (def.minSize !== null) {\n if (ctx.data.size < def.minSize.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_small,\n minimum: def.minSize.value,\n type: \"set\",\n inclusive: true,\n exact: false,\n message: def.minSize.message,\n });\n status.dirty();\n }\n }\n if (def.maxSize !== null) {\n if (ctx.data.size > def.maxSize.value) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.too_big,\n maximum: def.maxSize.value,\n type: \"set\",\n inclusive: true,\n exact: false,\n message: def.maxSize.message,\n });\n status.dirty();\n }\n }\n const valueType = this._def.valueType;\n function finalizeSet(elements) {\n const parsedSet = new Set();\n for (const element of elements) {\n if (element.status === \"aborted\")\n return INVALID;\n if (element.status === \"dirty\")\n status.dirty();\n parsedSet.add(element.value);\n }\n return { status: status.value, value: parsedSet };\n }\n const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i)));\n if (ctx.common.async) {\n return Promise.all(elements).then((elements) => finalizeSet(elements));\n }\n else {\n return finalizeSet(elements);\n }\n }\n min(minSize, message) {\n return new ZodSet({\n ...this._def,\n minSize: { value: minSize, message: errorUtil.toString(message) },\n });\n }\n max(maxSize, message) {\n return new ZodSet({\n ...this._def,\n maxSize: { value: maxSize, message: errorUtil.toString(message) },\n });\n }\n size(size, message) {\n return this.min(size, message).max(size, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n }\n ZodSet.create = (valueType, params) => {\n return new ZodSet({\n valueType,\n minSize: null,\n maxSize: null,\n typeName: ZodFirstPartyTypeKind.ZodSet,\n ...processCreateParams(params),\n });\n };\n class ZodFunction extends ZodType {\n constructor() {\n super(...arguments);\n this.validate = this.implement;\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.function) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.function,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n function makeArgsIssue(args, error) {\n return makeIssue({\n data: args,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n getErrorMap(),\n errorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodIssueCode.invalid_arguments,\n argumentsError: error,\n },\n });\n }\n function makeReturnsIssue(returns, error) {\n return makeIssue({\n data: returns,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n getErrorMap(),\n errorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodIssueCode.invalid_return_type,\n returnTypeError: error,\n },\n });\n }\n const params = { errorMap: ctx.common.contextualErrorMap };\n const fn = ctx.data;\n if (this._def.returns instanceof ZodPromise) {\n // Would love a way to avoid disabling this rule, but we need\n // an alias (using an arrow function was what caused 2651).\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const me = this;\n return OK(async function (...args) {\n const error = new ZodError([]);\n const parsedArgs = await me._def.args\n .parseAsync(args, params)\n .catch((e) => {\n error.addIssue(makeArgsIssue(args, e));\n throw error;\n });\n const result = 
await Reflect.apply(fn, this, parsedArgs);\n const parsedReturns = await me._def.returns._def.type\n .parseAsync(result, params)\n .catch((e) => {\n error.addIssue(makeReturnsIssue(result, e));\n throw error;\n });\n return parsedReturns;\n });\n }\n else {\n // Would love a way to avoid disabling this rule, but we need\n // an alias (using an arrow function was what caused 2651).\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const me = this;\n return OK(function (...args) {\n const parsedArgs = me._def.args.safeParse(args, params);\n if (!parsedArgs.success) {\n throw new ZodError([makeArgsIssue(args, parsedArgs.error)]);\n }\n const result = Reflect.apply(fn, this, parsedArgs.data);\n const parsedReturns = me._def.returns.safeParse(result, params);\n if (!parsedReturns.success) {\n throw new ZodError([makeReturnsIssue(result, parsedReturns.error)]);\n }\n return parsedReturns.data;\n });\n }\n }\n parameters() {\n return this._def.args;\n }\n returnType() {\n return this._def.returns;\n }\n args(...items) {\n return new ZodFunction({\n ...this._def,\n args: ZodTuple.create(items).rest(ZodUnknown.create()),\n });\n }\n returns(returnType) {\n return new ZodFunction({\n ...this._def,\n returns: returnType,\n });\n }\n implement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n strictImplement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n static create(args, returns, params) {\n return new ZodFunction({\n args: (args\n ? args\n : ZodTuple.create([]).rest(ZodUnknown.create())),\n returns: returns || ZodUnknown.create(),\n typeName: ZodFirstPartyTypeKind.ZodFunction,\n ...processCreateParams(params),\n });\n }\n }\n class ZodLazy extends ZodType {\n get schema() {\n return this._def.getter();\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const lazySchema = this._def.getter();\n return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx });\n }\n }\n ZodLazy.create = (getter, params) => {\n return new ZodLazy({\n getter: getter,\n typeName: ZodFirstPartyTypeKind.ZodLazy,\n ...processCreateParams(params),\n });\n };\n class ZodLiteral extends ZodType {\n _parse(input) {\n if (input.data !== this._def.value) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_literal,\n expected: this._def.value,\n });\n return INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n get value() {\n return this._def.value;\n }\n }\n ZodLiteral.create = (value, params) => {\n return new ZodLiteral({\n value: value,\n typeName: ZodFirstPartyTypeKind.ZodLiteral,\n ...processCreateParams(params),\n });\n };\n function createZodEnum(values, params) {\n return new ZodEnum({\n values,\n typeName: ZodFirstPartyTypeKind.ZodEnum,\n ...processCreateParams(params),\n });\n }\n class ZodEnum extends ZodType {\n constructor() {\n super(...arguments);\n _ZodEnum_cache.set(this, void 0);\n }\n _parse(input) {\n if (typeof input.data !== \"string\") {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n addIssueToContext(ctx, {\n expected: util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodIssueCode.invalid_type,\n });\n return INVALID;\n }\n if (!__classPrivateFieldGet(this, _ZodEnum_cache)) {\n __classPrivateFieldSet(this, _ZodEnum_cache, new Set(this._def.values));\n }\n if (!__classPrivateFieldGet(this, _ZodEnum_cache).has(input.data)) {\n const ctx = 
this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n get options() {\n return this._def.values;\n }\n get enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Values() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n extract(values, newDef = this._def) {\n return ZodEnum.create(values, {\n ...this._def,\n ...newDef,\n });\n }\n exclude(values, newDef = this._def) {\n return ZodEnum.create(this.options.filter((opt) => !values.includes(opt)), {\n ...this._def,\n ...newDef,\n });\n }\n }\n _ZodEnum_cache = new WeakMap();\n ZodEnum.create = createZodEnum;\n class ZodNativeEnum extends ZodType {\n constructor() {\n super(...arguments);\n _ZodNativeEnum_cache.set(this, void 0);\n }\n _parse(input) {\n const nativeEnumValues = util.getValidEnumValues(this._def.values);\n const ctx = this._getOrReturnCtx(input);\n if (ctx.parsedType !== ZodParsedType.string &&\n ctx.parsedType !== ZodParsedType.number) {\n const expectedValues = util.objectValues(nativeEnumValues);\n addIssueToContext(ctx, {\n expected: util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodIssueCode.invalid_type,\n });\n return INVALID;\n }\n if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache)) {\n __classPrivateFieldSet(this, _ZodNativeEnum_cache, new Set(util.getValidEnumValues(this._def.values)));\n }\n if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache).has(input.data)) {\n const expectedValues = util.objectValues(nativeEnumValues);\n addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n return INVALID;\n }\n return OK(input.data);\n }\n get enum() {\n return this._def.values;\n }\n }\n _ZodNativeEnum_cache = new WeakMap();\n ZodNativeEnum.create = (values, params) => {\n return new ZodNativeEnum({\n values: values,\n typeName: ZodFirstPartyTypeKind.ZodNativeEnum,\n ...processCreateParams(params),\n });\n };\n class ZodPromise extends ZodType {\n unwrap() {\n return this._def.type;\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== ZodParsedType.promise &&\n ctx.common.async === false) {\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.promise,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n const promisified = ctx.parsedType === ZodParsedType.promise\n ? ctx.data\n : Promise.resolve(ctx.data);\n return OK(promisified.then((data) => {\n return this._def.type.parseAsync(data, {\n path: ctx.path,\n errorMap: ctx.common.contextualErrorMap,\n });\n }));\n }\n }\n ZodPromise.create = (schema, params) => {\n return new ZodPromise({\n type: schema,\n typeName: ZodFirstPartyTypeKind.ZodPromise,\n ...processCreateParams(params),\n });\n };\n class ZodEffects extends ZodType {\n innerType() {\n return this._def.schema;\n }\n sourceType() {\n return this._def.schema._def.typeName === ZodFirstPartyTypeKind.ZodEffects\n ? 
this._def.schema.sourceType()\n : this._def.schema;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const effect = this._def.effect || null;\n const checkCtx = {\n addIssue: (arg) => {\n addIssueToContext(ctx, arg);\n if (arg.fatal) {\n status.abort();\n }\n else {\n status.dirty();\n }\n },\n get path() {\n return ctx.path;\n },\n };\n checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx);\n if (effect.type === \"preprocess\") {\n const processed = effect.transform(ctx.data, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(processed).then(async (processed) => {\n if (status.value === \"aborted\")\n return INVALID;\n const result = await this._def.schema._parseAsync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n if (result.status === \"aborted\")\n return INVALID;\n if (result.status === \"dirty\")\n return DIRTY(result.value);\n if (status.value === \"dirty\")\n return DIRTY(result.value);\n return result;\n });\n }\n else {\n if (status.value === \"aborted\")\n return INVALID;\n const result = this._def.schema._parseSync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n if (result.status === \"aborted\")\n return INVALID;\n if (result.status === \"dirty\")\n return DIRTY(result.value);\n if (status.value === \"dirty\")\n return DIRTY(result.value);\n return result;\n }\n }\n if (effect.type === \"refinement\") {\n const executeRefinement = (acc) => {\n const result = effect.refinement(acc, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(result);\n }\n if (result instanceof Promise) {\n throw new Error(\"Async refinement encountered during synchronous parse operation. Use .parseAsync instead.\");\n }\n return acc;\n };\n if (ctx.common.async === false) {\n const inner = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (inner.status === \"aborted\")\n return INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n // return value is ignored\n executeRefinement(inner.value);\n return { status: status.value, value: inner.value };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((inner) => {\n if (inner.status === \"aborted\")\n return INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n return executeRefinement(inner.value).then(() => {\n return { status: status.value, value: inner.value };\n });\n });\n }\n }\n if (effect.type === \"transform\") {\n if (ctx.common.async === false) {\n const base = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (!isValid(base))\n return base;\n const result = effect.transform(base.value, checkCtx);\n if (result instanceof Promise) {\n throw new Error(`Asynchronous transform encountered during synchronous parse operation. 
Use .parseAsync instead.`);\n }\n return { status: status.value, value: result };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((base) => {\n if (!isValid(base))\n return base;\n return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result }));\n });\n }\n }\n util.assertNever(effect);\n }\n }\n ZodEffects.create = (schema, effect, params) => {\n return new ZodEffects({\n schema,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect,\n ...processCreateParams(params),\n });\n };\n ZodEffects.createWithPreprocess = (preprocess, schema, params) => {\n return new ZodEffects({\n schema,\n effect: { type: \"preprocess\", transform: preprocess },\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n ...processCreateParams(params),\n });\n };\n class ZodOptional extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === ZodParsedType.undefined) {\n return OK(undefined);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodOptional.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params),\n });\n };\n class ZodNullable extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === ZodParsedType.null) {\n return OK(null);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodNullable.create = (type, params) => {\n return new ZodNullable({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodNullable,\n ...processCreateParams(params),\n });\n };\n class ZodDefault extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n let data = ctx.data;\n if (ctx.parsedType === ZodParsedType.undefined) {\n data = this._def.defaultValue();\n }\n return this._def.innerType._parse({\n data,\n path: ctx.path,\n parent: ctx,\n });\n }\n removeDefault() {\n return this._def.innerType;\n }\n }\n ZodDefault.create = (type, params) => {\n return new ZodDefault({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodDefault,\n defaultValue: typeof params.default === \"function\"\n ? params.default\n : () => params.default,\n ...processCreateParams(params),\n });\n };\n class ZodCatch extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n // newCtx is used to not collect issues from inner types in ctx\n const newCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n };\n const result = this._def.innerType._parse({\n data: newCtx.data,\n path: newCtx.path,\n parent: {\n ...newCtx,\n },\n });\n if (isAsync(result)) {\n return result.then((result) => {\n return {\n status: \"valid\",\n value: result.status === \"valid\"\n ? result.value\n : this._def.catchValue({\n get error() {\n return new ZodError(newCtx.common.issues);\n },\n input: newCtx.data,\n }),\n };\n });\n }\n else {\n return {\n status: \"valid\",\n value: result.status === \"valid\"\n ? result.value\n : this._def.catchValue({\n get error() {\n return new ZodError(newCtx.common.issues);\n },\n input: newCtx.data,\n }),\n };\n }\n }\n removeCatch() {\n return this._def.innerType;\n }\n }\n ZodCatch.create = (type, params) => {\n return new ZodCatch({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodCatch,\n catchValue: typeof params.catch === \"function\" ? 
params.catch : () => params.catch,\n ...processCreateParams(params),\n });\n };\n class ZodNaN extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== ZodParsedType.nan) {\n const ctx = this._getOrReturnCtx(input);\n addIssueToContext(ctx, {\n code: ZodIssueCode.invalid_type,\n expected: ZodParsedType.nan,\n received: ctx.parsedType,\n });\n return INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n }\n ZodNaN.create = (params) => {\n return new ZodNaN({\n typeName: ZodFirstPartyTypeKind.ZodNaN,\n ...processCreateParams(params),\n });\n };\n const BRAND = Symbol(\"zod_brand\");\n class ZodBranded extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const data = ctx.data;\n return this._def.type._parse({\n data,\n path: ctx.path,\n parent: ctx,\n });\n }\n unwrap() {\n return this._def.type;\n }\n }\n class ZodPipeline extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.common.async) {\n const handleAsync = async () => {\n const inResult = await this._def.in._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (inResult.status === \"aborted\")\n return INVALID;\n if (inResult.status === \"dirty\") {\n status.dirty();\n return DIRTY(inResult.value);\n }\n else {\n return this._def.out._parseAsync({\n data: inResult.value,\n path: ctx.path,\n parent: ctx,\n });\n }\n };\n return handleAsync();\n }\n else {\n const inResult = this._def.in._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (inResult.status === \"aborted\")\n return INVALID;\n if (inResult.status === \"dirty\") {\n status.dirty();\n return {\n status: \"dirty\",\n value: inResult.value,\n };\n }\n else {\n return this._def.out._parseSync({\n data: inResult.value,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n }\n static create(a, b) {\n return new ZodPipeline({\n in: a,\n out: b,\n typeName: ZodFirstPartyTypeKind.ZodPipeline,\n });\n }\n }\n class ZodReadonly extends ZodType {\n _parse(input) {\n const result = this._def.innerType._parse(input);\n const freeze = (data) => {\n if (isValid(data)) {\n data.value = Object.freeze(data.value);\n }\n return data;\n };\n return isAsync(result)\n ? result.then((data) => freeze(data))\n : freeze(result);\n }\n unwrap() {\n return this._def.innerType;\n }\n }\n ZodReadonly.create = (type, params) => {\n return new ZodReadonly({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodReadonly,\n ...processCreateParams(params),\n });\n };\n function custom(check, params = {}, \n /**\n * @deprecated\n *\n * Pass `fatal` into the params object instead:\n *\n * ```ts\n * z.string().custom((val) => val.length > 5, { fatal: false })\n * ```\n *\n */\n fatal) {\n if (check)\n return ZodAny.create().superRefine((data, ctx) => {\n var _a, _b;\n if (!check(data)) {\n const p = typeof params === \"function\"\n ? params(data)\n : typeof params === \"string\"\n ? { message: params }\n : params;\n const _fatal = (_b = (_a = p.fatal) !== null && _a !== void 0 ? _a : fatal) !== null && _b !== void 0 ? _b : true;\n const p2 = typeof p === \"string\" ? 
{ message: p } : p;\n ctx.addIssue({ code: \"custom\", ...p2, fatal: _fatal });\n }\n });\n return ZodAny.create();\n }\n const late = {\n object: ZodObject.lazycreate,\n };\n var ZodFirstPartyTypeKind;\n (function (ZodFirstPartyTypeKind) {\n ZodFirstPartyTypeKind[\"ZodString\"] = \"ZodString\";\n ZodFirstPartyTypeKind[\"ZodNumber\"] = \"ZodNumber\";\n ZodFirstPartyTypeKind[\"ZodNaN\"] = \"ZodNaN\";\n ZodFirstPartyTypeKind[\"ZodBigInt\"] = \"ZodBigInt\";\n ZodFirstPartyTypeKind[\"ZodBoolean\"] = \"ZodBoolean\";\n ZodFirstPartyTypeKind[\"ZodDate\"] = \"ZodDate\";\n ZodFirstPartyTypeKind[\"ZodSymbol\"] = \"ZodSymbol\";\n ZodFirstPartyTypeKind[\"ZodUndefined\"] = \"ZodUndefined\";\n ZodFirstPartyTypeKind[\"ZodNull\"] = \"ZodNull\";\n ZodFirstPartyTypeKind[\"ZodAny\"] = \"ZodAny\";\n ZodFirstPartyTypeKind[\"ZodUnknown\"] = \"ZodUnknown\";\n ZodFirstPartyTypeKind[\"ZodNever\"] = \"ZodNever\";\n ZodFirstPartyTypeKind[\"ZodVoid\"] = \"ZodVoid\";\n ZodFirstPartyTypeKind[\"ZodArray\"] = \"ZodArray\";\n ZodFirstPartyTypeKind[\"ZodObject\"] = \"ZodObject\";\n ZodFirstPartyTypeKind[\"ZodUnion\"] = \"ZodUnion\";\n ZodFirstPartyTypeKind[\"ZodDiscriminatedUnion\"] = \"ZodDiscriminatedUnion\";\n ZodFirstPartyTypeKind[\"ZodIntersection\"] = \"ZodIntersection\";\n ZodFirstPartyTypeKind[\"ZodTuple\"] = \"ZodTuple\";\n ZodFirstPartyTypeKind[\"ZodRecord\"] = \"ZodRecord\";\n ZodFirstPartyTypeKind[\"ZodMap\"] = \"ZodMap\";\n ZodFirstPartyTypeKind[\"ZodSet\"] = \"ZodSet\";\n ZodFirstPartyTypeKind[\"ZodFunction\"] = \"ZodFunction\";\n ZodFirstPartyTypeKind[\"ZodLazy\"] = \"ZodLazy\";\n ZodFirstPartyTypeKind[\"ZodLiteral\"] = \"ZodLiteral\";\n ZodFirstPartyTypeKind[\"ZodEnum\"] = \"ZodEnum\";\n ZodFirstPartyTypeKind[\"ZodEffects\"] = \"ZodEffects\";\n ZodFirstPartyTypeKind[\"ZodNativeEnum\"] = \"ZodNativeEnum\";\n ZodFirstPartyTypeKind[\"ZodOptional\"] = \"ZodOptional\";\n ZodFirstPartyTypeKind[\"ZodNullable\"] = \"ZodNullable\";\n ZodFirstPartyTypeKind[\"ZodDefault\"] = \"ZodDefault\";\n ZodFirstPartyTypeKind[\"ZodCatch\"] = \"ZodCatch\";\n ZodFirstPartyTypeKind[\"ZodPromise\"] = \"ZodPromise\";\n ZodFirstPartyTypeKind[\"ZodBranded\"] = \"ZodBranded\";\n ZodFirstPartyTypeKind[\"ZodPipeline\"] = \"ZodPipeline\";\n ZodFirstPartyTypeKind[\"ZodReadonly\"] = \"ZodReadonly\";\n })(ZodFirstPartyTypeKind || (ZodFirstPartyTypeKind = {}));\n const instanceOfType = (\n // const instanceOfType = <T extends new (...args: any[]) => any>(\n cls, params = {\n message: `Input not instance of ${cls.name}`,\n }) => custom((data) => data instanceof cls, params);\n const stringType = ZodString.create;\n const numberType = ZodNumber.create;\n const nanType = ZodNaN.create;\n const bigIntType = ZodBigInt.create;\n const booleanType = ZodBoolean.create;\n const dateType = ZodDate.create;\n const symbolType = ZodSymbol.create;\n const undefinedType = ZodUndefined.create;\n const nullType = ZodNull.create;\n const anyType = ZodAny.create;\n const unknownType = ZodUnknown.create;\n const neverType = ZodNever.create;\n const voidType = ZodVoid.create;\n const arrayType = ZodArray.create;\n const objectType = ZodObject.create;\n const strictObjectType = ZodObject.strictCreate;\n const unionType = ZodUnion.create;\n const discriminatedUnionType = ZodDiscriminatedUnion.create;\n const intersectionType = ZodIntersection.create;\n const tupleType = ZodTuple.create;\n const recordType = ZodRecord.create;\n const mapType = ZodMap.create;\n const setType = ZodSet.create;\n const functionType = ZodFunction.create;\n const lazyType = 
ZodLazy.create;\n const literalType = ZodLiteral.create;\n const enumType = ZodEnum.create;\n const nativeEnumType = ZodNativeEnum.create;\n const promiseType = ZodPromise.create;\n const effectsType = ZodEffects.create;\n const optionalType = ZodOptional.create;\n const nullableType = ZodNullable.create;\n const preprocessType = ZodEffects.createWithPreprocess;\n const pipelineType = ZodPipeline.create;\n const ostring = () => stringType().optional();\n const onumber = () => numberType().optional();\n const oboolean = () => booleanType().optional();\n const coerce = {\n string: ((arg) => ZodString.create({ ...arg, coerce: true })),\n number: ((arg) => ZodNumber.create({ ...arg, coerce: true })),\n boolean: ((arg) => ZodBoolean.create({\n ...arg,\n coerce: true,\n })),\n bigint: ((arg) => ZodBigInt.create({ ...arg, coerce: true })),\n date: ((arg) => ZodDate.create({ ...arg, coerce: true })),\n };\n const NEVER = INVALID;\n\n var z = /*#__PURE__*/Object.freeze({\n __proto__: null,\n defaultErrorMap: errorMap,\n setErrorMap: setErrorMap,\n getErrorMap: getErrorMap,\n makeIssue: makeIssue,\n EMPTY_PATH: EMPTY_PATH,\n addIssueToContext: addIssueToContext,\n ParseStatus: ParseStatus,\n INVALID: INVALID,\n DIRTY: DIRTY,\n OK: OK,\n isAborted: isAborted,\n isDirty: isDirty,\n isValid: isValid,\n isAsync: isAsync,\n get util () { return util; },\n get objectUtil () { return objectUtil; },\n ZodParsedType: ZodParsedType,\n getParsedType: getParsedType,\n ZodType: ZodType,\n datetimeRegex: datetimeRegex,\n ZodString: ZodString,\n ZodNumber: ZodNumber,\n ZodBigInt: ZodBigInt,\n ZodBoolean: ZodBoolean,\n ZodDate: ZodDate,\n ZodSymbol: ZodSymbol,\n ZodUndefined: ZodUndefined,\n ZodNull: ZodNull,\n ZodAny: ZodAny,\n ZodUnknown: ZodUnknown,\n ZodNever: ZodNever,\n ZodVoid: ZodVoid,\n ZodArray: ZodArray,\n ZodObject: ZodObject,\n ZodUnion: ZodUnion,\n ZodDiscriminatedUnion: ZodDiscriminatedUnion,\n ZodIntersection: ZodIntersection,\n ZodTuple: ZodTuple,\n ZodRecord: ZodRecord,\n ZodMap: ZodMap,\n ZodSet: ZodSet,\n ZodFunction: ZodFunction,\n ZodLazy: ZodLazy,\n ZodLiteral: ZodLiteral,\n ZodEnum: ZodEnum,\n ZodNativeEnum: ZodNativeEnum,\n ZodPromise: ZodPromise,\n ZodEffects: ZodEffects,\n ZodTransformer: ZodEffects,\n ZodOptional: ZodOptional,\n ZodNullable: ZodNullable,\n ZodDefault: ZodDefault,\n ZodCatch: ZodCatch,\n ZodNaN: ZodNaN,\n BRAND: BRAND,\n ZodBranded: ZodBranded,\n ZodPipeline: ZodPipeline,\n ZodReadonly: ZodReadonly,\n custom: custom,\n Schema: ZodType,\n ZodSchema: ZodType,\n late: late,\n get ZodFirstPartyTypeKind () { return ZodFirstPartyTypeKind; },\n coerce: coerce,\n any: anyType,\n array: arrayType,\n bigint: bigIntType,\n boolean: booleanType,\n date: dateType,\n discriminatedUnion: discriminatedUnionType,\n effect: effectsType,\n 'enum': enumType,\n 'function': functionType,\n 'instanceof': instanceOfType,\n intersection: intersectionType,\n lazy: lazyType,\n literal: literalType,\n map: mapType,\n nan: nanType,\n nativeEnum: nativeEnumType,\n never: neverType,\n 'null': nullType,\n nullable: nullableType,\n number: numberType,\n object: objectType,\n oboolean: oboolean,\n onumber: onumber,\n optional: optionalType,\n ostring: ostring,\n pipeline: pipelineType,\n preprocess: preprocessType,\n promise: promiseType,\n record: recordType,\n set: setType,\n strictObject: strictObjectType,\n string: stringType,\n symbol: symbolType,\n transformer: effectsType,\n tuple: tupleType,\n 'undefined': undefinedType,\n union: unionType,\n unknown: unknownType,\n 'void': voidType,\n NEVER: 
NEVER,\n ZodIssueCode: ZodIssueCode,\n quotelessJson: quotelessJson,\n ZodError: ZodError\n });\n\n z.object({\n type: z.literal('plain'),\n content: z.string(),\n });\n\n /** Range in bytes, from should be less than to. */\n z.object({\n /** Included left border. */\n from: z.number().min(0),\n /** Excluded right border. */\n to: z.number().min(1),\n });\n\n function assertNever(x) {\n throw new Error('Unexpected object: ' + x); // This is ok, because this is a possible runtime error\n }\n\n /**\n * Type guard function that checks if the given value is a valid DataInfo.\n *\n * @param value - The value to check\n * @returns True if the value is a valid DataInfo, false otherwise\n */\n function isDataInfo(value) {\n if (!value || typeof value !== 'object') {\n return false;\n }\n const data = value;\n if (!('type' in data)) {\n return false;\n }\n switch (data.type) {\n case 'Json':\n return (typeof data.keyLength === 'number'\n && data.data !== undefined\n && typeof data.data === 'object');\n case 'JsonPartitioned':\n case 'BinaryPartitioned':\n case 'ParquetPartitioned':\n return (typeof data.partitionKeyLength === 'number'\n && data.parts !== undefined\n && typeof data.parts === 'object');\n default:\n return false;\n }\n }\n function mapDataInfo(dataInfo, mapFn) {\n if (dataInfo === undefined) {\n return undefined;\n }\n switch (dataInfo.type) {\n case 'Json':\n // Json type doesn't contain blobs, so return as is\n return dataInfo;\n case 'JsonPartitioned': {\n // Map each blob in parts\n const newParts = {};\n for (const [key, blob] of Object.entries(dataInfo.parts)) {\n newParts[key] = mapFn(blob);\n }\n return {\n ...dataInfo,\n parts: newParts,\n };\n }\n case 'BinaryPartitioned': {\n // Map each index and values blob in parts\n const newParts = {};\n for (const [key, chunk] of Object.entries(dataInfo.parts)) {\n newParts[key] = {\n index: mapFn(chunk.index),\n values: mapFn(chunk.values),\n };\n }\n return {\n ...dataInfo,\n parts: newParts,\n };\n }\n case 'ParquetPartitioned': {\n // Map each blob in parts\n const newParts = {};\n for (const [key, blob] of Object.entries(dataInfo.parts)) {\n newParts[key] = mapFn(blob);\n }\n return {\n ...dataInfo,\n parts: newParts,\n };\n }\n }\n }\n /**\n * @param dataInfo - The source DataInfo object\n * @param cb - Callback, function that have access to every blob to visit them all\n * @returns Nothing\n */\n function visitDataInfo(dataInfo, cb) {\n switch (dataInfo.type) {\n case 'Json':\n // Json type doesn't contain blobs, so return as is\n break;\n case 'JsonPartitioned': {\n // Visit each blob in parts\n Object.values(dataInfo.parts).forEach(cb);\n break;\n }\n case 'BinaryPartitioned': {\n // Visit each index and values blob in parts\n Object.values(dataInfo.parts).forEach((chunk) => {\n cb(chunk.index);\n cb(chunk.values);\n });\n break;\n }\n case 'ParquetPartitioned': {\n // Visit each blob in parts\n Object.values(dataInfo.parts).forEach(cb);\n break;\n }\n }\n }\n /**\n * Type guard function that checks if the given value is a valid DataInfoEntries.\n *\n * @param value - The value to check\n * @returns True if the value is a valid DataInfoEntries, false otherwise\n */\n function isDataInfoEntries(value) {\n if (!value || typeof value !== 'object') {\n return false;\n }\n const data = value;\n if (!('type' in data)) {\n return false;\n }\n switch (data.type) {\n case 'Json':\n return (typeof data.keyLength === 'number'\n && Array.isArray(data.data));\n case 'JsonPartitioned':\n case 'BinaryPartitioned':\n case 
'ParquetPartitioned':\n return (typeof data.partitionKeyLength === 'number'\n && Array.isArray(data.parts));\n default:\n return false;\n }\n }\n /**\n * Type guard function that checks if the given value is a valid PartitionedDataInfoEntries.\n *\n * @template Blob - Type parameter representing the storage reference type\n * @param value - The value to check\n * @returns True if the value is a valid PartitionedDataInfoEntries, false otherwise\n */\n function isPartitionedDataInfoEntries(value) {\n if (!isDataInfoEntries(value))\n return false;\n switch (value.type) {\n case 'JsonPartitioned':\n case 'BinaryPartitioned':\n case 'ParquetPartitioned':\n return true;\n default:\n return false;\n }\n }\n /**\n * Converts DataInfo to DataInfoEntries\n *\n * @param dataInfo - The record-based DataInfo object\n * @returns The equivalent entry-based DataInfoEntries object\n */\n function dataInfoToEntries(dataInfo) {\n switch (dataInfo.type) {\n case 'Json': return {\n type: 'Json',\n keyLength: dataInfo.keyLength,\n data: Object.entries(dataInfo.data).map(([keyStr, value]) => {\n const key = JSON.parse(keyStr);\n return { key, value };\n }),\n };\n case 'JsonPartitioned': return {\n type: 'JsonPartitioned',\n partitionKeyLength: dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {\n const key = JSON.parse(keyStr);\n return { key, value: blob };\n }),\n };\n case 'BinaryPartitioned': return {\n type: 'BinaryPartitioned',\n partitionKeyLength: dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, chunk]) => {\n const key = JSON.parse(keyStr);\n return { key, value: chunk };\n }),\n };\n case 'ParquetPartitioned': return {\n type: 'ParquetPartitioned',\n partitionKeyLength: dataInfo.partitionKeyLength,\n parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {\n const key = JSON.parse(keyStr);\n return { key, value: blob };\n }),\n };\n default:\n assertNever(dataInfo);\n }\n }\n /**\n * Converts DataInfoEntries to DataInfo\n *\n * @param dataInfoEntries - The entry-based DataInfoEntries object\n * @returns The equivalent record-based DataInfo object\n */\n function entriesToDataInfo(dataInfoEntries) {\n switch (dataInfoEntries.type) {\n case 'Json': return {\n type: 'Json',\n keyLength: dataInfoEntries.keyLength,\n data: Object.fromEntries(dataInfoEntries.data.map(({ key, value }) => [JSON.stringify(key), value])),\n };\n case 'JsonPartitioned': return {\n type: 'JsonPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),\n };\n case 'BinaryPartitioned': return {\n type: 'BinaryPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),\n };\n case 'ParquetPartitioned': return {\n type: 'ParquetPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength,\n parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),\n };\n default:\n assertNever(dataInfoEntries);\n }\n }\n\n /**\n * Just for convenience, usually it is an Error with name 'AbortError'\n */\n function stringifyValue(value) {\n if (typeof value === 'string') {\n return `String value was thrown: ${value}`;\n }\n if (value && typeof value === 'object') {\n try {\n return `Plain object was thrown: ${JSON.stringify(value)}`;\n }\n catch (jsonError) {\n const errorMessage = 
jsonError instanceof Error ? jsonError.message : String(jsonError);\n return `Non-serializable object was thrown (JSON.stringify failed: ${errorMessage}): ${String(value)}`;\n }\n }\n return String(`Non-Error value (${typeof value}) was thrown: ${value}`);\n }\n function ensureError(value) {\n if (value instanceof Error) {\n return value;\n }\n return new Error(stringifyValue(value));\n }\n\n function getDefaultExportFromCjs (x) {\n \treturn x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;\n }\n\n /* jshint esversion: 6 */\n\n var canonicalize$1;\n var hasRequiredCanonicalize;\n\n function requireCanonicalize () {\n \tif (hasRequiredCanonicalize) return canonicalize$1;\n \thasRequiredCanonicalize = 1;\n\n \tcanonicalize$1 = function serialize (object) {\n \t if (typeof object === 'number' && isNaN(object)) {\n \t throw new Error('NaN is not allowed');\n \t }\n\n \t if (typeof object === 'number' && !isFinite(object)) {\n \t throw new Error('Infinity is not allowed');\n \t }\n\n \t if (object === null || typeof object !== 'object') {\n \t return JSON.stringify(object);\n \t }\n\n \t if (object.toJSON instanceof Function) {\n \t return serialize(object.toJSON());\n \t }\n\n \t if (Array.isArray(object)) {\n \t const values = object.reduce((t, cv, ci) => {\n \t const comma = ci === 0 ? '' : ',';\n \t const value = cv === undefined || typeof cv === 'symbol' ? null : cv;\n \t return `${t}${comma}${serialize(value)}`;\n \t }, '');\n \t return `[${values}]`;\n \t }\n\n \t const values = Object.keys(object).sort().reduce((t, cv) => {\n \t if (object[cv] === undefined ||\n \t typeof object[cv] === 'symbol') {\n \t return t;\n \t }\n \t const comma = t.length === 0 ? '' : ',';\n \t return `${t}${comma}${serialize(cv)}:${serialize(object[cv])}`;\n \t }, '');\n \t return `{${values}}`;\n \t};\n \treturn canonicalize$1;\n }\n\n var canonicalizeExports = requireCanonicalize();\n var canonicalize = /*@__PURE__*/getDefaultExportFromCjs(canonicalizeExports);\n\n function stringifyJson(value) {\n return JSON.stringify(value);\n }\n function canonicalizeJson(value) {\n return canonicalize(value);\n }\n function parseJson(value) {\n return JSON.parse(value);\n }\n\n function readMetadata(metadata, key) {\n return metadata?.[key];\n }\n function readMetadataJsonOrThrow(metadata, metadataJson, key, methodNameInError = 'readMetadataJsonOrThrow') {\n const json = readMetadata(metadata, key);\n if (json === undefined)\n return undefined;\n const schema = metadataJson[key];\n try {\n const value = JSON.parse(json);\n return schema.parse(value);\n }\n catch (error) {\n throw new Error(`${methodNameInError} failed, `\n + `key: ${String(key)}, `\n + `value: ${json}, `\n + `error: ${ensureError(error)}`);\n }\n }\n function readMetadataJson(metadata, metadataJson, key) {\n try {\n return readMetadataJsonOrThrow(metadata, metadataJson, key);\n }\n catch {\n return undefined; // treat invalid values as unset\n }\n }\n /// Well-known annotations\n const Annotation = {\n DiscreteValues: 'pl7.app/discreteValues',\n Graph: {\n Axis: {\n HighCardinality: 'pl7.app/graph/axis/highCardinality',\n LowerLimit: 'pl7.app/graph/axis/lowerLimit',\n SymmetricRange: 'pl7.app/graph/axis/symmetricRange',\n UpperLimit: 'pl7.app/graph/axis/upperLimit',\n },\n IsDenseAxis: 'pl7.app/graph/isDenseAxis',\n IsVirtual: 'pl7.app/graph/isVirtual',\n Palette: 'pl7.app/graph/palette',\n Thresholds: 'pl7.app/graph/thresholds',\n TreatAbsentValuesAs: 'pl7.app/graph/treatAbsentValuesAs',\n },\n HideDataFromUi: 
'pl7.app/hideDataFromUi',\n HideDataFromGraphs: 'pl7.app/hideDataFromGraphs',\n IsDiscreteFilter: 'pl7.app/isDiscreteFilter',\n IsLinkerColumn: 'pl7.app/isLinkerColumn',\n IsSubset: 'pl7.app/isSubset',\n Label: 'pl7.app/label',\n Max: 'pl7.app/max',\n Min: 'pl7.app/min',\n MultipliesBy: 'pl7.app/multipliesBy',\n Parents: 'pl7.app/parents',\n Sequence: {\n Annotation: {\n Mapping: 'pl7.app/sequence/annotation/mapping',\n },\n IsAnnotation: 'pl7.app/sequence/isAnnotation',\n },\n Table: {\n OrderPriority: 'pl7.app/table/orderPriority'},\n Trace: 'pl7.app/trace',\n VDJ: {\n IsAssemblingFeature: 'pl7.app/vdj/isAssemblingFeature',\n },\n };\n const ValueTypeSchema = z.enum(['Int', 'Long', 'Float', 'Double', 'String']);\n const AnnotationJson = {\n [Annotation.DiscreteValues]: z.array(z.string()).or(z.array(z.number())),\n [Annotation.Graph.Axis.HighCardinality]: z.boolean(),\n [Annotation.Graph.Axis.LowerLimit]: z.number(),\n [Annotation.Graph.Axis.UpperLimit]: z.number(),\n [Annotation.Graph.Axis.SymmetricRange]: z.boolean(),\n [Annotation.Graph.IsDenseAxis]: z.boolean(),\n [Annotation.Graph.Palette]: z.object({ mapping: z.record(z.number()), name: z.string() }),\n [Annotation.Graph.Thresholds]: z.array(z.object({\n columnId: z.object({ valueType: ValueTypeSchema, name: z.string() }),\n value: z.number(),\n })),\n [Annotation.Graph.TreatAbsentValuesAs]: z.number(),\n [Annotation.Graph.IsVirtual]: z.boolean(),\n [Annotation.HideDataFromUi]: z.boolean(),\n [Annotation.HideDataFromGraphs]: z.boolean(),\n [Annotation.IsDiscreteFilter]: z.boolean(),\n [Annotation.IsLinkerColumn]: z.boolean(),\n [Annotation.IsSubset]: z.boolean(),\n [Annotation.Max]: z.number(),\n [Annotation.Min]: z.number(),\n [Annotation.MultipliesBy]: z.array(z.string()),\n [Annotation.Parents]: z.array(z.string()),\n [Annotation.Sequence.Annotation.Mapping]: z.record(z.string(), z.string()),\n [Annotation.Sequence.IsAnnotation]: z.boolean(),\n [Annotation.Table.OrderPriority]: z.number(),\n [Annotation.Trace]: z.record(z.string(), z.unknown()),\n [Annotation.VDJ.IsAssemblingFeature]: z.boolean(),\n };\n /// Helper function for reading plain annotation values\n function readAnnotation(spec, key) {\n return readMetadata(spec?.annotations, key);\n }\n /// Helper function for reading json-encoded annotation values, returns undefined on JSON parsing error\n function readAnnotationJson(spec, key) {\n return readMetadataJson(spec?.annotations, AnnotationJson, key);\n }\n function isLinkerColumn(column) {\n return !!readAnnotationJson(column, Annotation.IsLinkerColumn);\n }\n function makeAxisTree(axis) {\n return { axis, children: [] };\n }\n /** Build tree by axis parents annotations */\n function getAxesTree(rootAxis) {\n const root = makeAxisTree(rootAxis);\n let nodesQ = [root];\n while (nodesQ.length) {\n const nextNodes = [];\n for (const node of nodesQ) {\n node.children = node.axis.parentAxesSpec.map(makeAxisTree);\n nextNodes.push(...node.children);\n }\n nodesQ = nextNodes;\n }\n return root;\n }\n /** Get array of axisSpecs from axisTree */\n function getArrayFromAxisTree(tree) {\n const res = [tree.axis];\n let nodesQ = [tree];\n while (nodesQ.length) {\n const nextNodes = [];\n for (const node of nodesQ) {\n for (const parent of node.children) {\n res.push(parent.axis);\n nextNodes.push(parent);\n }\n }\n nodesQ = nextNodes;\n }\n return res;\n }\n function canonicalizeAxisWithParents(axis) {\n return canonicalizeJson(getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n }\n function normalizingAxesComparator(axis1, 
axis2) {\n if (axis1.name !== axis2.name) {\n return axis1.name < axis2.name ? 1 : -1;\n }\n if (axis1.type !== axis2.type) {\n return axis1.type < axis2.type ? 1 : -1;\n }\n const domain1 = canonicalizeJson(axis1.domain ?? {});\n const domain2 = canonicalizeJson(axis2.domain ?? {});\n if (domain1 !== domain2) {\n return domain1 < domain2 ? 1 : -1;\n }\n const parents1 = canonicalizeAxisWithParents(axis1);\n const parents2 = canonicalizeAxisWithParents(axis2);\n if (parents1 !== parents2) {\n return parents1 < parents2 ? 1 : -1;\n }\n const annotation1 = canonicalizeJson(axis1.annotations ?? {});\n const annotation2 = canonicalizeJson(axis2.annotations ?? {});\n if (annotation1 !== annotation2) {\n return annotation1 < annotation2 ? 1 : -1;\n }\n return 0;\n }\n function parseParentsFromAnnotations(axis) {\n const parentsList = readAnnotationJson(axis, Annotation.Parents);\n if (parentsList === undefined) {\n return [];\n }\n return parentsList;\n }\n function sortParentsDeep(axisSpec) {\n axisSpec.parentAxesSpec.forEach(sortParentsDeep);\n axisSpec.parentAxesSpec.sort(normalizingAxesComparator);\n }\n function hasCycleOfParents(axisSpec) {\n const root = makeAxisTree(axisSpec);\n let nodesQ = [root];\n const ancestors = new Set(canonicalizeJson(getAxisId(axisSpec)));\n while (nodesQ.length) {\n const nextNodes = [];\n const levelIds = new Set();\n for (const node of nodesQ) {\n node.children = node.axis.parentAxesSpec.map(makeAxisTree);\n for (const child of node.children) {\n const childId = canonicalizeJson(getAxisId(child.axis));\n if (!levelIds.has(childId)) {\n nextNodes.push(child);\n levelIds.add(childId);\n if (ancestors.has(childId)) {\n return true;\n }\n ancestors.add(childId);\n }\n }\n }\n nodesQ = nextNodes;\n }\n return false;\n }\n /** Create list of normalized axisSpec (parents are in array of specs, not indexes) */\n function getNormalizedAxesList(axes) {\n if (!axes.length) {\n return [];\n }\n const modifiedAxes = axes.map((axis) => {\n const { parentAxes: _, ...copiedRest } = axis;\n return { ...copiedRest, annotations: { ...copiedRest.annotations }, parentAxesSpec: [] };\n });\n axes.forEach((axis, idx) => {\n const modifiedAxis = modifiedAxes[idx];\n if (axis.parentAxes) { // if we have parents by indexes then take from the list\n modifiedAxis.parentAxesSpec = axis.parentAxes.map((idx) => modifiedAxes[idx]);\n }\n else { // else try to parse from annotation name\n const parents = parseParentsFromAnnotations(axis).map((name) => modifiedAxes.find((axis) => axis.name === name));\n modifiedAxis.parentAxesSpec = parents.some((p) => p === undefined) ? 
[] : parents;\n delete modifiedAxis.annotations?.[Annotation.Parents];\n }\n });\n if (modifiedAxes.some(hasCycleOfParents)) { // Axes list is broken\n modifiedAxes.forEach((axis) => {\n axis.parentAxesSpec = [];\n });\n }\n else {\n modifiedAxes.forEach((axis) => {\n sortParentsDeep(axis);\n });\n }\n return modifiedAxes;\n }\n /// Well-known column names\n const PColumnName = {\n Label: 'pl7.app/label'};\n function isLabelColumn(column) {\n return column.axesSpec.length === 1 && column.name === PColumnName.Label;\n }\n /** Get column id and spec from a column */\n function getColumnIdAndSpec(column) {\n return {\n columnId: column.id,\n spec: column.spec,\n };\n }\n /** Extracts axis ids from axis spec */\n function getAxisId(spec) {\n const { type, name, domain } = spec;\n const result = { type, name };\n if (domain && Object.entries(domain).length > 0) {\n Object.assign(result, { domain });\n }\n return result;\n }\n /** Extracts axes ids from axes spec array from column spec */\n function getAxesId(spec) {\n return spec.map(getAxisId);\n }\n /** Canonicalizes axis id */\n function canonicalizeAxisId(id) {\n return canonicalizeJson(getAxisId(id));\n }\n /** Returns true if all domains from query are found in target */\n function matchDomain$1(query, target) {\n if (query === undefined)\n return target === undefined;\n if (target === undefined)\n return true;\n for (const k in target) {\n if (query[k] !== target[k])\n return false;\n }\n return true;\n }\n /** Returns whether \"match\" axis id is compatible with the \"query\" */\n function matchAxisId(query, target) {\n return query.name === target.name && matchDomain$1(query.domain, target.domain);\n }\n\n function mapPTableDef(def, cb) {\n return { ...def, src: mapJoinEntry(def.src, cb) };\n }\n function mapJoinEntry(entry, cb) {\n switch (entry.type) {\n case 'column':\n return {\n type: 'column',\n column: cb(entry.column),\n };\n case 'slicedColumn':\n return {\n type: 'slicedColumn',\n column: cb(entry.column),\n newId: entry.newId,\n axisFilters: entry.axisFilters,\n };\n case 'artificialColumn':\n return {\n type: 'artificialColumn',\n column: cb(entry.column),\n newId: entry.newId,\n axesIndices: entry.axesIndices,\n };\n case 'inlineColumn':\n return entry;\n case 'inner':\n case 'full':\n return {\n type: entry.type,\n entries: entry.entries.map((col) => mapJoinEntry(col, cb)),\n };\n case 'outer':\n return {\n type: 'outer',\n primary: mapJoinEntry(entry.primary, cb),\n secondary: entry.secondary.map((col) => mapJoinEntry(col, cb)),\n };\n default:\n assertNever(entry);\n }\n }\n\n /**\n * Canonically serializes a {@link UniversalPColumnId} to a string.\n * @param id - The column identifier to serialize\n * @returns The canonically serialized string\n */\n function stringifyColumnId(id) {\n return canonicalize(id);\n }\n\n //\n // Helper functions\n //\n function axisKey(axis) {\n return canonicalize(getAxisId(axis));\n }\n function domainKey(key, value) {\n return JSON.stringify([key, value]);\n }\n /**\n * Context for resolving and generating anchored references to columns and axes\n * Maintains maps of known domain values and axes that can be referenced by anchors\n */\n class AnchoredIdDeriver {\n anchors;\n domains = new Map();\n axes = new Map();\n /**\n * Domain packs are used to group domain keys that can be anchored to the same anchor\n * This is used to optimize the lookup of domain anchors\n */\n domainPacks = [];\n /**\n * Maps domain packs to anchors\n */\n domainPackToAnchor = new Map();\n /**\n * Creates a new 
anchor context from a set of anchor column specifications\n * @param anchors Record of anchor column specifications indexed by anchor ID\n */\n constructor(anchors) {\n this.anchors = anchors;\n const anchorEntries = Object.entries(anchors);\n anchorEntries.sort((a, b) => a[0].localeCompare(b[0]));\n for (const [anchorId, spec] of anchorEntries) {\n for (let axisIdx = 0; axisIdx < spec.axesSpec.length; axisIdx++) {\n const axis = spec.axesSpec[axisIdx];\n const key = axisKey(axis);\n this.axes.set(key, { anchor: anchorId, idx: axisIdx });\n }\n if (spec.domain !== undefined) {\n const domainEntries = Object.entries(spec.domain);\n domainEntries.sort((a, b) => a[0].localeCompare(b[0]));\n this.domainPackToAnchor.set(JSON.stringify(domainEntries), anchorId);\n this.domainPacks.push(domainEntries.map(([dKey]) => dKey));\n for (const [dKey, dValue] of domainEntries) {\n const key = domainKey(dKey, dValue);\n this.domains.set(key, anchorId);\n }\n }\n }\n }\n /**\n * Implementation of derive method\n */\n derive(spec, axisFilters) {\n const result = {\n name: spec.name,\n axes: [],\n };\n let skipDomains = undefined;\n if (spec.domain !== undefined) {\n outer: for (const domainPack of this.domainPacks) {\n const dAnchor = [];\n for (const domainKey of domainPack) {\n const dValue = spec.domain[domainKey];\n if (dValue !== undefined)\n dAnchor.push([domainKey, dValue]);\n else\n break outer;\n }\n const domainAnchor = this.domainPackToAnchor.get(JSON.stringify(dAnchor));\n if (domainAnchor !== undefined) {\n result.domainAnchor = domainAnchor;\n skipDomains = new Set(domainPack);\n break;\n }\n }\n }\n for (const [dKey, dValue] of Object.entries(spec.domain ?? {})) {\n if (skipDomains !== undefined && skipDomains.has(dKey))\n continue;\n const key = domainKey(dKey, dValue);\n const anchorId = this.domains.get(key);\n result.domain ??= {};\n result.domain[dKey] = anchorId ? 
{ anchor: anchorId } : dValue;\n }\n result.axes = spec.axesSpec.map((axis) => {\n const key = axisKey(axis);\n const anchorAxisRef = this.axes.get(key);\n if (anchorAxisRef === undefined)\n return getAxisId(axis);\n else\n return anchorAxisRef;\n });\n // If no axis filters are provided, return the anchored ID as is\n if (!axisFilters || axisFilters.length === 0) {\n return result;\n }\n // Process axis filters and create a sliced column ID\n const resolvedFilters = [];\n for (const filter of axisFilters) {\n const [axisIdOrIndex, value] = filter;\n // If it's already a numeric index, validate it\n if (typeof axisIdOrIndex === 'number') {\n if (axisIdOrIndex < 0 || axisIdOrIndex >= spec.axesSpec.length) {\n throw new Error(`Axis index ${axisIdOrIndex} is out of bounds (0-${spec.axesSpec.length - 1})`);\n }\n resolvedFilters.push([axisIdOrIndex, value]);\n }\n else {\n // If it's a string (axis name), resolve it to an index\n const axisIndex = spec.axesSpec.findIndex((axis) => axis.name === axisIdOrIndex);\n if (axisIndex === -1) {\n throw new Error(`Axis with name \"${axisIdOrIndex}\" not found in the column specification`);\n }\n resolvedFilters.push([axisIndex, value]);\n }\n }\n // Sort filters by axis index to ensure consistency\n resolvedFilters.sort((a, b) => a[0] - b[0]);\n return {\n source: result,\n axisFilters: resolvedFilters,\n };\n }\n /**\n * Derives a canonicalized string representation of an anchored column identifier, can be used as a unique identifier for the column\n * @param spec Column specification to anchor\n * @param axisFilters Optional axis filters to apply to the column\n * @returns A canonicalized string representation of the anchored column identifier\n */\n deriveS(spec, axisFilters) {\n return stringifyColumnId(this.derive(spec, axisFilters));\n }\n }\n /**\n * Resolves anchored references in a column matcher to create a non-anchored matcher.\n * Doing an opposite operation to {@link AnchorIdDeriver.derive()}.\n *\n * @param anchors - Record of anchor column specifications indexed by anchor id\n * @param matcher - An anchored column matcher (or id, which is subtype of it) containing references that need to be resolved\n * @param options - Options for resolving anchors\n * @returns A non-anchored column matcher with all references resolved to actual values\n */\n function resolveAnchors(anchors, matcher, options) {\n const result = { ...matcher };\n const ignoreMissingDomains = options?.ignoreMissingDomains ?? 
false;\n if (result.domainAnchor !== undefined) {\n const anchorSpec = anchors[result.domainAnchor];\n if (!anchorSpec)\n throw new Error(`Anchor \"${result.domainAnchor}\" not found`);\n const anchorDomains = anchorSpec.domain || {};\n result.domain = { ...anchorDomains, ...result.domain };\n delete result.domainAnchor;\n }\n if (result.domain) {\n const resolvedDomain = {};\n for (const [key, value] of Object.entries(result.domain)) {\n if (typeof value === 'string') {\n resolvedDomain[key] = value;\n }\n else {\n // It's an AnchorDomainRef\n const anchorSpec = anchors[value.anchor];\n if (!anchorSpec)\n throw new Error(`Anchor \"${value.anchor}\" not found for domain key \"${key}\"`);\n if (!anchorSpec.domain || anchorSpec.domain[key] === undefined) {\n if (!ignoreMissingDomains)\n throw new Error(`Domain key \"${key}\" not found in anchor \"${value.anchor}\"`);\n continue;\n }\n resolvedDomain[key] = anchorSpec.domain[key];\n }\n }\n result.domain = resolvedDomain;\n }\n if (result.axes)\n result.axes = result.axes.map((axis) => resolveAxisReference(anchors, axis));\n return result;\n }\n /**\n * Resolves an anchored axis reference to a concrete AxisId\n */\n function resolveAxisReference(anchors, axisRef) {\n if (!isAnchorAxisRef(axisRef))\n return axisRef;\n // It's an anchored reference\n const anchorId = axisRef.anchor;\n const anchorSpec = anchors[anchorId];\n if (!anchorSpec)\n throw new Error(`Anchor \"${anchorId}\" not found for axis reference`);\n if ('idx' in axisRef) {\n // AnchorAxisRefByIdx\n if (axisRef.idx < 0 || axisRef.idx >= anchorSpec.axesSpec.length)\n throw new Error(`Axis index ${axisRef.idx} out of bounds for anchor \"${anchorId}\"`);\n return anchorSpec.axesSpec[axisRef.idx];\n }\n else if ('name' in axisRef) {\n // AnchorAxisRefByName\n const matches = anchorSpec.axesSpec.filter((axis) => axis.name === axisRef.name);\n if (matches.length > 1)\n throw new Error(`Multiple axes with name \"${axisRef.name}\" found in anchor \"${anchorId}\"`);\n if (matches.length === 0)\n throw new Error(`Axis with name \"${axisRef.name}\" not found in anchor \"${anchorId}\"`);\n return matches[0];\n }\n else if ('id' in axisRef) {\n // AnchorAxisRefByMatcher\n const matches = anchorSpec.axesSpec.filter((axis) => matchAxisId(axisRef.id, getAxisId(axis)));\n if (matches.length > 1)\n throw new Error(`Multiple matching axes found for matcher in anchor \"${anchorId}\"`);\n if (matches.length === 0)\n throw new Error(`No matching axis found for matcher in anchor \"${anchorId}\"`);\n return matches[0];\n }\n throw new Error(`Unsupported axis reference type`);\n }\n /**\n * Type guard to check if a value is an anchored axis reference\n */\n function isAnchorAxisRef(value) {\n return typeof value === 'object' && 'anchor' in value;\n }\n\n function isPColumnSpec(spec) {\n return spec.kind === 'PColumn';\n }\n function isPColumn(obj) {\n return isPColumnSpec(obj.spec);\n }\n function ensurePColumn(obj) {\n if (!isPColumn(obj))\n throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n }\n function mapPObjectData(pObj, cb) {\n return pObj === undefined\n ? undefined\n : {\n ...pObj,\n data: cb(typeof pObj.data === 'function' ? 
pObj.data() : pObj.data),\n };\n }\n function extractAllColumns(entry) {\n const columns = new Map();\n const addAllColumns = (entry) => {\n switch (entry.type) {\n case 'column':\n columns.set(entry.column.id, entry.column);\n return;\n case 'slicedColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'artificialColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'inlineColumn':\n return;\n case 'full':\n case 'inner':\n for (const e of entry.entries)\n addAllColumns(e);\n return;\n case 'outer':\n addAllColumns(entry.primary);\n for (const e of entry.secondary)\n addAllColumns(e);\n return;\n default:\n assertNever(entry);\n }\n };\n addAllColumns(entry);\n return [...columns.values()];\n }\n\n /**\n * Determines if an axis ID matches an axis selector.\n *\n * @param selector - The selector with criteria to match against\n * @param axis - The AxisId to check against the selector\n * @returns true if the AxisId matches all specified criteria in the selector, false otherwise\n */\n function matchAxis(selector, axis) {\n // Match name if specified\n if (selector.name !== undefined && selector.name !== axis.name)\n return false;\n // Match type if specified\n if (selector.type !== undefined) {\n if (Array.isArray(selector.type)) {\n if (!selector.type.includes(axis.type))\n return false;\n }\n else if (selector.type !== axis.type) {\n return false;\n }\n }\n // Match domain if specified - using existing logic from matchAxisId\n if (selector.domain !== undefined) {\n const axisDomain = axis.domain || {};\n for (const [key, value] of Object.entries(selector.domain))\n if (axisDomain[key] !== value)\n return false;\n }\n return true;\n }\n /**\n * Determines if a given PColumnSpec matches a selector.\n *\n * @param pcolumn - The PColumnSpec to check against the selector\n * @param selector - The selector criteria to match against\n * @returns true if the PColumnSpec matches all criteria in the selector, false otherwise\n */\n function matchPColumn(pcolumn, selector) {\n // Match name if specified\n if (selector.name !== undefined && pcolumn.name !== selector.name)\n return false;\n // Match name pattern if specified\n if (selector.namePattern !== undefined && !new RegExp(selector.namePattern).test(pcolumn.name))\n return false;\n // Match type if specified\n if (selector.type !== undefined) {\n if (Array.isArray(selector.type)) {\n if (!selector.type.includes(pcolumn.valueType))\n return false;\n }\n else if (selector.type !== pcolumn.valueType) {\n return false;\n }\n }\n // Match domain if specified\n if (selector.domain !== undefined) {\n const columnDomain = pcolumn.domain || {};\n for (const [key, value] of Object.entries(selector.domain))\n if (columnDomain[key] !== value)\n return false;\n }\n // Match axes if specified\n if (selector.axes !== undefined) {\n const pcolumnAxes = pcolumn.axesSpec.map(getAxisId);\n if (selector.partialAxesMatch) {\n // For partial matching, all selector axes must match at least one column axis\n for (const selectorAxis of selector.axes)\n if (!pcolumnAxes.some((columnAxis) => matchAxis(selectorAxis, columnAxis)))\n return false;\n }\n else {\n // For exact matching, column must have the same number of axes and all must match\n if (pcolumnAxes.length !== selector.axes.length)\n return false;\n // Each selector axis must match a corresponding column axis\n for (let i = 0; i < selector.axes.length; i++)\n if (!matchAxis(selector.axes[i], pcolumnAxes[i]))\n return false;\n }\n }\n // Match annotations if specified\n if 
(selector.annotations !== undefined) {\n const columnAnnotations = pcolumn.annotations || {};\n for (const [key, value] of Object.entries(selector.annotations))\n if (columnAnnotations[key] !== value)\n return false;\n }\n // Match annotation patterns if specified\n if (selector.annotationPatterns !== undefined) {\n const columnAnnotations = pcolumn.annotations || {};\n for (const [key, pattern] of Object.entries(selector.annotationPatterns)) {\n const value = columnAnnotations[key];\n if (value === undefined || !new RegExp(pattern).test(value))\n return false;\n }\n }\n return true;\n }\n /**\n * Convert a predicate or array of selectors to a single predicate function\n * @param predicateOrSelectors - Either a function that takes a PColumnSpec and returns a boolean,\n * or an array of PColumnSelectors, or a single PColumnSelector\n * @returns A function that takes a PColumnSpec and returns a boolean\n */\n function selectorsToPredicate(predicateOrSelectors) {\n if (Array.isArray(predicateOrSelectors))\n return (spec) => predicateOrSelectors.some((selector) => isPColumnSpec(spec) && matchPColumn(spec, selector));\n else\n return (spec) => isPColumnSpec(spec) && matchPColumn(spec, predicateOrSelectors);\n }\n\n function deriveNativeId(spec) {\n const result = {\n kind: spec.kind,\n name: spec.name,\n };\n if (spec.domain !== undefined)\n result.domain = spec.domain;\n if (isPColumnSpec(spec))\n result.axesSpec = getAxesId(spec.axesSpec);\n return canonicalize(result);\n }\n\n class LinkerMap {\n /** Graph of linkers connected by axes (single or grouped by parents) */\n data;\n constructor(linkerMap) {\n this.data = linkerMap;\n }\n get keys() {\n return this.data.keys();\n }\n get keyAxesIds() {\n return [...this.data.keys()].map(parseJson);\n }\n static fromColumns(columns) {\n const result = new Map();\n for (const linker of columns.filter((l) => !!readAnnotationJson(l.spec, Annotation.IsLinkerColumn))) {\n const groups = LinkerMap.getAxesGroups(getNormalizedAxesList(linker.spec.axesSpec)); // split input axes into groups by parent links from annotation\n if (groups.length !== 2) {\n continue; // not a valid linker column\n }\n const [left, right] = groups;\n // In case of group:\n // A - C\n // \\_ B _ D\n // E/\n // put 2 variants as keys:\n // A - C\n // \\_ B _ D\n // and\n // E - B - D\n const leftKeyVariants = LinkerMap.getAxesRoots(left).map((axis) => {\n const axes = getArrayFromAxisTree(getAxesTree(axis));\n const key = canonicalizeJson(axes.map(getAxisId));\n return [key, axes];\n });\n const rightKeyVariants = LinkerMap.getAxesRoots(right).map((axis) => {\n const axes = getArrayFromAxisTree(getAxesTree(axis));\n const key = canonicalizeJson(axes.map(getAxisId));\n return [key, axes];\n });\n for (const [keyLeft, spec] of leftKeyVariants) {\n if (!result.has(keyLeft)) {\n result.set(keyLeft, { keyAxesSpec: spec, linkWith: new Map() });\n }\n }\n for (const [keyRight, spec] of rightKeyVariants) {\n if (!result.has(keyRight)) {\n result.set(keyRight, { keyAxesSpec: spec, linkWith: new Map() });\n }\n }\n for (const [keyLeft] of leftKeyVariants) {\n for (const [keyRight] of rightKeyVariants) {\n result.get(keyLeft)?.linkWith.set(keyRight, linker);\n result.get(keyRight)?.linkWith.set(keyLeft, linker);\n }\n }\n }\n return new this(result);\n }\n /** Get all available nodes of linker graphs if start from sourceAxesKeys */\n searchAvailableAxesKeys(sourceAxesKeys) {\n const startKeys = new Set(sourceAxesKeys);\n const allAvailableKeys = new Set();\n let nextKeys = sourceAxesKeys;\n 
while (nextKeys.length) {\n const next = [];\n for (const key of nextKeys) {\n const node = this.data.get(key);\n if (!node)\n continue;\n for (const availableKey of node.linkWith.keys()) {\n if (!allAvailableKeys.has(availableKey) && !startKeys.has(availableKey)) {\n next.push(availableKey);\n allAvailableKeys.add(availableKey);\n }\n }\n }\n nextKeys = next;\n }\n return allAvailableKeys;\n }\n /** Get all linker columns that are necessary to reach endKey from startKey */\n searchLinkerPath(startKey, endKey) {\n const previous = {};\n let nextIds = new Set([startKey]);\n const visited = new Set([startKey]);\n while (nextIds.size) {\n const next = new Set();\n for (const nextId of nextIds) {\n const node = this.data.get(nextId);\n if (!node)\n continue;\n for (const availableId of node.linkWith.keys()) {\n previous[availableId] = nextId;\n if (availableId === endKey) {\n const ids = [];\n let current = endKey;\n while (previous[current] !== startKey) {\n ids.push(current);\n current = previous[current];\n }\n ids.push(current);\n return ids.map((id) => this.data.get(id).linkWith.get(previous[id]));\n }\n else if (!visited.has(availableId)) {\n next.add(availableId);\n visited.add(availableId);\n }\n }\n }\n nextIds = next;\n }\n return [];\n }\n getLinkerColumnsForAxes({ from: sourceAxes, to: targetAxes, throwWhenNoLinkExists = true, }) {\n // start keys - all possible keys in linker map using sourceAxes (for example, all axes of block's columns or all axes of columns in data-inputs)\n const startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n return Array.from(new Map(LinkerMap.getAxesRoots(targetAxes)\n .map(LinkerMap.getLinkerKeyFromAxisSpec) // target keys contain all axes to be linked; if some of target axes has parents they must be in the key\n .flatMap((targetKey) => {\n const linkers = startKeys\n .map((startKey) => this.searchLinkerPath(startKey, targetKey))\n .reduce((shortestPath, path) => (shortestPath.length && shortestPath.length < path.length) || !path.length ? shortestPath : path, [])\n .map((linker) => [linker.columnId, linker]);\n if (!linkers.length && throwWhenNoLinkExists) {\n throw Error(`Unable to find linker column for ${targetKey}`);\n }\n return linkers;\n })).values());\n }\n /** Get list of axisSpecs from keys of linker columns map */\n getAxesListFromKeysList(keys) {\n return Array.from(new Map(keys.flatMap((key) => this.data.get(key)?.keyAxesSpec ?? 
[])\n .map((axis) => [canonicalizeJson(getAxisId(axis)), axis])).values());\n }\n /** Get axes of target axes that are impossible to be linked to source axes with current linker map */\n getNonLinkableAxes(sourceAxes, targetAxes) {\n const startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n // target keys contain all axes to be linked; if some of target axes has parents they must be in the key\n const targetKeys = targetAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n const axes = Array.from(new Map(targetAxes\n .filter((_targetAxis, idx) => {\n const targetKey = targetKeys[idx];\n return !startKeys.some((startKey) => this.searchLinkerPath(startKey, targetKey).length);\n })\n .flatMap((axis) => getArrayFromAxisTree(getAxesTree(axis)).map((axis) => [canonicalizeJson(getAxisId(axis)), axis]))).values());\n return axes;\n }\n /** Get all axes that can be connected to sourceAxes by linkers */\n getReachableByLinkersAxesFromAxesNormalized(sourceAxes, matchAxisIdFn) {\n let startKeys = [];\n if (matchAxisIdFn) {\n const sourceAxisIdsGrouped = sourceAxes.map((axis) => getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n for (const sourceAxisIdsGroup of sourceAxisIdsGrouped) {\n const matched = this.keyAxesIds.find((keyIds) => keyIds.every((linkerKeyAxisId) => sourceAxisIdsGroup.find((sourceAxisId) => matchAxisIdFn(linkerKeyAxisId, sourceAxisId))));\n if (matched) {\n startKeys.push(canonicalizeJson(matched));\n }\n }\n }\n else {\n startKeys = sourceAxes.map(LinkerMap.getLinkerKeyFromAxisSpec);\n }\n const availableKeys = this.searchAvailableAxesKeys(startKeys);\n return this.getAxesListFromKeysList([...availableKeys]);\n }\n getReachableByLinkersAxesFromAxes(sourceAxes, matchAxisIdFn) {\n return this.getReachableByLinkersAxesFromAxesNormalized(getNormalizedAxesList(sourceAxes), matchAxisIdFn);\n }\n static getLinkerKeyFromAxisSpec(axis) {\n return canonicalizeJson(getArrayFromAxisTree(getAxesTree(axis)).map(getAxisId));\n }\n /** Split array of axes into several arrays by parents: axes of one group are parents for each other.\n There are no order inside every group. 
*/\n static getAxesGroups(axesSpec) {\n switch (axesSpec.length) {\n case 0: return [];\n case 1: return [[axesSpec[0]]];\n }\n const axisKeys = axesSpec.map((spec) => canonicalizeJson(getAxisId(spec)));\n const axisParentsIdxs = axesSpec.map((spec) => new Set(spec.parentAxesSpec\n .map((spec) => canonicalizeJson(getAxisId(spec)))\n .map((el) => {\n const idx = axisKeys.indexOf(el);\n if (idx === -1) {\n throw new Error(`malformed axesSpec: ${JSON.stringify(axesSpec)}, unable to locate parent ${el}`);\n }\n return idx;\n })));\n const allIdxs = [...axesSpec.keys()];\n const groups = []; // groups of axis indexes\n const usedIdxs = new Set();\n let nextFreeEl = allIdxs.find((idx) => !usedIdxs.has(idx));\n while (nextFreeEl !== undefined) {\n const currentGroup = [nextFreeEl];\n usedIdxs.add(nextFreeEl);\n let nextElsOfCurrentGroup = [nextFreeEl];\n while (nextElsOfCurrentGroup.length) {\n const next = new Set();\n for (const groupIdx of nextElsOfCurrentGroup) {\n const groupElementParents = axisParentsIdxs[groupIdx];\n allIdxs.forEach((idx) => {\n if (idx === groupIdx || usedIdxs.has(idx)) {\n return;\n }\n const parents = axisParentsIdxs[idx];\n if (parents.has(groupIdx) || groupElementParents.has(idx)) {\n currentGroup.push(idx);\n next.add(idx);\n usedIdxs.add(idx);\n }\n });\n }\n nextElsOfCurrentGroup = [...next];\n }\n groups.push([...currentGroup]);\n nextFreeEl = allIdxs.find((idx) => !usedIdxs.has(idx));\n }\n return groups.map((group) => group.map((idx) => axesSpec[idx]));\n }\n /** Get all axes that are not parents of any other axis */\n static getAxesRoots(axes) {\n const parentsSet = new Set(axes.flatMap((axis) => axis.parentAxesSpec).map((spec) => canonicalizeJson(getAxisId(spec))));\n return axes.filter((axis) => !parentsSet.has(canonicalizeJson(getAxisId(axis))));\n }\n }\n\n /** Characters in string representation */\n const PlIdLength = 24; // = 15 bytes * 8 bits / 5 bits per char in base32\n z\n .string()\n .length(PlIdLength)\n .regex(/[ABCDEFGHIJKLMNOPQRSTUVWXYZ234567]/) // RFC4648\n .brand('PlId');\n\n z\n .object({\n __isRef: z\n .literal(true)\n .describe('Crucial marker for the block dependency tree reconstruction'),\n blockId: z.string()\n .describe('Upstream block id'),\n name: z.string()\n .describe('Name of the output provided to the upstream block\\'s output context'),\n requireEnrichments: z.literal(true).optional()\n .describe('True if current block that stores this reference in its args, may need enrichments '\n + 'for the references value originating from the blocks in between current and referenced block'),\n })\n .describe('Universal reference type, allowing to set block connections. It is crucial that '\n + '{@link __isRef} is present and equal to true, internal logic relies on this marker '\n + 'to build block dependency trees.')\n .readonly();\n /**\n * Type guard to check if a value is a PlRef.\n *\n * @param value - The value to check.\n * @returns True if the value is a PlRef, false otherwise.\n */\n function isPlRef(value) {\n return (typeof value === 'object'\n && value !== null\n && '__isRef' in value\n && value.__isRef === true\n && 'blockId' in value\n && 'name' in value);\n }\n /**\n * Creates a new PlRef based on an existing one, explicitly setting (default) or removing the\n * requireEnrichments property.\n *\n * @param ref - The original PlRef object.\n * @param requireEnrichments - If true, the `requireEnrichments: true` property is added\n * to the returned PlRef. If false, the `requireEnrichments` property is removed. 
Defaults to true.\n * @returns A new PlRef object with the `requireEnrichments` property set or removed accordingly.\n */\n function withEnrichments(ref, requireEnrichments = true) {\n if (requireEnrichments)\n return {\n ...ref,\n requireEnrichments: true,\n };\n else {\n const { requireEnrichments: _, ...rest } = ref;\n return rest;\n }\n }\n\n function mapValueInVOE(voe, cb) {\n return voe.ok ? { ok: true, value: cb(voe.value) } : voe;\n }\n\n class FutureRef {\n handle;\n postProcess;\n isResolved = false;\n resolvedValue;\n constructor(handle, postProcess = (v) => v) {\n this.handle = handle;\n this.postProcess = postProcess;\n registerFutureAwait(handle, (value) => {\n this.resolvedValue = postProcess(value);\n this.isResolved = true;\n });\n }\n map(mapping) {\n return new FutureRef(this.handle, (v) => mapping(this.postProcess(v)));\n }\n mapDefined(mapping) {\n return new FutureRef(this.handle, (v) => {\n const vv = this.postProcess(v);\n return vv ? mapping(vv) : undefined;\n });\n }\n toJSON() {\n return this.isResolved\n ? this.resolvedValue\n : { __awaited_futures__: [this.handle] };\n }\n }\n\n function ifDef(value, cb) {\n return value === undefined ? undefined : cb(value);\n }\n /** Represent resource tree node accessor */\n class TreeNodeAccessor {\n handle;\n resolvePath;\n constructor(handle, resolvePath) {\n this.handle = handle;\n this.resolvePath = resolvePath;\n }\n resolve(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: 'Input',\n ...(typeof s === 'string' ? { field: s } : s),\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveOutput(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: 'Output',\n ...(typeof s === 'string' ? { field: s } : s),\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveInput(...steps) {\n const transformedSteps = steps.map((s) => ({\n assertFieldType: 'Input',\n ...(typeof s === 'string' ? { field: s } : s),\n }));\n return this.resolveWithCommon({}, ...transformedSteps);\n }\n resolveAny(...steps) {\n return this.resolveWithCommon({}, ...steps);\n }\n resolveWithCommon(commonOptions, ...steps) {\n const resolvePath = [\n ...this.resolvePath,\n ...steps.map((step) => typeof step === 'string' ? 
step : step.field),\n ];\n return ifDef(getCfgRenderCtx().resolveWithCommon(this.handle, commonOptions, ...steps), (accessor) => new TreeNodeAccessor(accessor, resolvePath));\n }\n get resourceType() {\n return getCfgRenderCtx().getResourceType(this.handle);\n }\n getInputsLocked() {\n return getCfgRenderCtx().getInputsLocked(this.handle);\n }\n getOutputsLocked() {\n return getCfgRenderCtx().getOutputsLocked(this.handle);\n }\n getIsReadyOrError() {\n return getCfgRenderCtx().getIsReadyOrError(this.handle);\n }\n getIsFinal() {\n return getCfgRenderCtx().getIsFinal(this.handle);\n }\n getError() {\n const resolvePath = [...this.resolvePath, 'error'];\n return ifDef(getCfgRenderCtx().getError(this.handle), (accsessor) => new TreeNodeAccessor(accsessor, resolvePath));\n }\n listInputFields() {\n return getCfgRenderCtx().listInputFields(this.handle);\n }\n listOutputFields() {\n return getCfgRenderCtx().listOutputFields(this.handle);\n }\n listDynamicFields() {\n return getCfgRenderCtx().listDynamicFields(this.handle);\n }\n getKeyValueBase64(key) {\n return getCfgRenderCtx().getKeyValueBase64(this.handle, key);\n }\n getKeyValueAsString(key) {\n return getCfgRenderCtx().getKeyValueAsString(this.handle, key);\n }\n getKeyValueAsJson(key) {\n const content = this.getKeyValueAsString(key);\n if (content == undefined)\n throw new Error('Resource has no content.');\n return JSON.parse(content);\n }\n getDataBase64() {\n return getCfgRenderCtx().getDataBase64(this.handle);\n }\n getDataAsString() {\n return getCfgRenderCtx().getDataAsString(this.handle);\n }\n getDataAsJson() {\n const content = this.getDataAsString();\n if (content == undefined)\n throw new Error('Resource has no content.');\n return JSON.parse(content);\n }\n /**\n *\n */\n getPColumns(errorOnUnknownField = false, prefix = '') {\n const result = this.parsePObjectCollection(errorOnUnknownField, prefix);\n if (result === undefined)\n return undefined;\n const pf = Object.entries(result).map(([, obj]) => {\n if (!isPColumn(obj))\n throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n });\n return pf;\n }\n /**\n *\n */\n parsePObjectCollection(errorOnUnknownField = false, prefix = '') {\n const pObjects = getCfgRenderCtx().parsePObjectCollection(this.handle, errorOnUnknownField, prefix, ...this.resolvePath);\n if (pObjects === undefined)\n return undefined;\n const result = {};\n for (const [key, value] of Object.entries(pObjects)) {\n const resolvePath = [...this.resolvePath, key];\n result[key] = mapPObjectData(value, (c) => new TreeNodeAccessor(c, resolvePath));\n }\n return result;\n }\n getFileContentAsBase64(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsBase64(this.handle, range));\n }\n getFileContentAsString(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsString(this.handle, range));\n }\n getFileContentAsJson(range) {\n return new FutureRef(getCfgRenderCtx().getBlobContentAsString(this.handle, range)).mapDefined((v) => JSON.parse(v));\n }\n /**\n * @deprecated use getFileContentAsBase64\n */\n getBlobContentAsBase64() {\n return this.getFileContentAsBase64();\n }\n /**\n * @deprecated use getFileContentAsString\n */\n getBlobContentAsString() {\n return this.getFileContentAsString();\n }\n /**\n * @returns downloaded file handle\n */\n getFileHandle() {\n return new FutureRef(getCfgRenderCtx().getDownloadedBlobContentHandle(this.handle));\n }\n /**\n * @deprecated use getFileHandle\n */\n getDownloadedBlobHandle() {\n return this.getFileHandle();\n }\n /**\n * 
@returns downloaded file handle\n */\n getRemoteFileHandle() {\n return new FutureRef(getCfgRenderCtx().getOnDemandBlobContentHandle(this.handle));\n }\n /**\n * @deprecated use getRemoteFileHandle\n */\n getOnDemandBlobHandle() {\n return this.getRemoteFileHandle();\n }\n /**\n * @returns the url to the extracted folder\n */\n extractArchiveAndGetURL(format) {\n return new FutureRef(getCfgRenderCtx().extractArchiveAndGetURL(this.handle, format));\n }\n getImportProgress() {\n return new FutureRef(getCfgRenderCtx().getImportProgress(this.handle));\n }\n getLastLogs(nLines) {\n return new FutureRef(getCfgRenderCtx().getLastLogs(this.handle, nLines));\n }\n getProgressLog(patternToSearch) {\n return new FutureRef(getCfgRenderCtx().getProgressLog(this.handle, patternToSearch));\n }\n getProgressLogWithInfo(patternToSearch) {\n return new FutureRef(getCfgRenderCtx().getProgressLogWithInfo(this.handle, patternToSearch));\n }\n getLogHandle() {\n return new FutureRef(getCfgRenderCtx().getLogHandle(this.handle));\n }\n allFieldsResolved(fieldType = 'Input') {\n switch (fieldType) {\n case 'Input':\n return (this.getInputsLocked()\n && this.listInputFields().every((field) => this.resolve({ field, assertFieldType: 'Input' }) !== undefined));\n case 'Output':\n return (this.getOutputsLocked()\n && this.listOutputFields().every((field) => this.resolve({ field, assertFieldType: 'Output' }) !== undefined));\n }\n }\n mapFields(_mapping, _ops) {\n const { fieldType, requireLocked, skipUnresolved } = {\n fieldType: 'Input',\n requireLocked: true,\n skipUnresolved: false,\n ..._ops,\n };\n const mapping = _mapping;\n if (requireLocked) {\n if (fieldType === 'Input' && !this.getInputsLocked())\n return undefined;\n if (fieldType === 'Output' && !this.getOutputsLocked())\n return undefined;\n }\n const fieldList = fieldType === 'Input'\n ? this.listInputFields()\n : fieldType === 'Output'\n ? this.listOutputFields()\n : this.listDynamicFields();\n let fieldEntries = fieldList.map((field) => [field, this.resolve({ field, assertFieldType: fieldType })]);\n if (skipUnresolved)\n fieldEntries = fieldEntries.filter((e) => e[1] !== undefined);\n return fieldEntries.map(([name, value]) => mapping(name, value));\n }\n }\n\n const StagingAccessorName = 'staging';\n const MainAccessorName = 'main';\n\n function filterDataInfoEntries(dataInfoEntries, axisFilters) {\n // Sort filters by axis index in descending order to safely remove elements from arrays\n const sortedFilters = [...axisFilters].sort((a, b) => b[0] - a[0]);\n // Check for invalid filter axes\n const { type } = dataInfoEntries;\n switch (type) {\n case 'Json': {\n const { keyLength } = dataInfoEntries;\n for (const [axisIdx] of axisFilters)\n if (axisIdx >= keyLength)\n throw new Error(`Can't filter on non-data axis ${axisIdx}. Must be >= ${keyLength}`);\n break;\n }\n case 'JsonPartitioned':\n case 'BinaryPartitioned':\n case 'ParquetPartitioned': {\n const { partitionKeyLength } = dataInfoEntries;\n for (const [axisIdx] of axisFilters)\n if (axisIdx >= partitionKeyLength)\n throw new Error(`Can't filter on non-partitioned axis ${axisIdx}. 
Must be >= ${partitionKeyLength}`);\n break;\n }\n default:\n // eslint-disable-next-line @typescript-eslint/restrict-template-expressions\n throw new Error(`Unsupported data info type: ${type}`);\n }\n const keyMatchesFilters = (key) => {\n for (const [axisIdx, axisValue] of sortedFilters)\n if (key[axisIdx] !== axisValue)\n return false;\n return true;\n };\n const removeFilteredAxes = (key) => {\n const newKey = [...key];\n // Remove axes in descending order to maintain correct indices\n for (const [axisIdx] of sortedFilters)\n newKey.splice(axisIdx, 1);\n return newKey;\n };\n switch (dataInfoEntries.type) {\n case 'Json': return {\n type: 'Json',\n keyLength: dataInfoEntries.keyLength - axisFilters.length,\n data: dataInfoEntries.data\n .filter((entry) => keyMatchesFilters(entry.key))\n .map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value,\n })),\n };\n case 'JsonPartitioned': return {\n type: 'JsonPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts\n .filter((entry) => keyMatchesFilters(entry.key))\n .map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value,\n })),\n };\n case 'BinaryPartitioned': return {\n type: 'BinaryPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts\n .filter((entry) => keyMatchesFilters(entry.key))\n .map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value,\n })),\n };\n case 'ParquetPartitioned': return {\n type: 'ParquetPartitioned',\n partitionKeyLength: dataInfoEntries.partitionKeyLength - axisFilters.length,\n parts: dataInfoEntries.parts\n .filter((entry) => keyMatchesFilters(entry.key))\n .map((entry) => ({\n key: removeFilteredAxes(entry.key),\n value: entry.value,\n })),\n };\n }\n }\n\n const TraceEntry = z.object({\n type: z.string(),\n importance: z.number().optional(),\n id: z.string().optional(),\n label: z.string(),\n });\n const Trace = z.array(TraceEntry);\n const DistancePenalty = 0.001;\n const LabelType = '__LABEL__';\n const LabelTypeFull = '__LABEL__@1';\n function deriveLabels(values, specExtractor, ops = {}) {\n const importances = new Map();\n const forceTraceElements = (ops.forceTraceElements !== undefined && ops.forceTraceElements.length > 0)\n ? new Set(ops.forceTraceElements)\n : undefined;\n // number of times certain type occurred among all of the\n const numberOfRecordsWithType = new Map();\n const enrichedRecords = values.map((value) => {\n const extractorResult = specExtractor(value);\n let spec;\n let prefixTrace;\n let suffixTrace;\n // Check if the result is the new structure or just PObjectSpec\n if ('spec' in extractorResult && typeof extractorResult.spec === 'object') {\n // It's the new structure { spec, prefixTrace?, suffixTrace? }\n spec = extractorResult.spec;\n prefixTrace = extractorResult.prefixTrace;\n suffixTrace = extractorResult.suffixTrace;\n }\n else {\n // It's just PObjectSpec\n spec = extractorResult;\n }\n const label = readAnnotation(spec, Annotation.Label);\n const traceStr = readAnnotation(spec, Annotation.Trace);\n const baseTrace = (traceStr ? Trace.safeParse(parseJson(traceStr)).data : undefined) ?? [];\n const trace = [\n ...(prefixTrace ?? []),\n ...baseTrace,\n ...(suffixTrace ?? 
[]),\n ];\n if (label !== undefined) {\n const labelEntry = { label, type: LabelType, importance: -2 };\n if (ops.addLabelAsSuffix)\n trace.push(labelEntry);\n else\n trace.splice(0, 0, labelEntry);\n }\n const fullTrace = [];\n const occurrences = new Map();\n for (let i = trace.length - 1; i >= 0; --i) {\n const { type: typeName } = trace[i];\n const importance = trace[i].importance ?? 0;\n const occurrenceIndex = (occurrences.get(typeName) ?? 0) + 1;\n occurrences.set(typeName, occurrenceIndex);\n const fullType = `${typeName}@${occurrenceIndex}`;\n numberOfRecordsWithType.set(fullType, (numberOfRecordsWithType.get(fullType) ?? 0) + 1);\n importances.set(fullType, Math.max(importances.get(fullType) ?? Number.NEGATIVE_INFINITY, importance - (trace.length - i) * DistancePenalty));\n fullTrace.push({ ...trace[i], fullType, occurrenceIndex: occurrenceIndex });\n }\n fullTrace.reverse();\n return {\n value,\n spec,\n label,\n fullTrace,\n };\n });\n // excluding repeated types (i.e. ..@2, ..@3, etc.) not found in some records\n const mainTypes = [];\n // repeated types (i.e. ..@2, ..@3, etc.) not found in some records\n const secondaryTypes = [];\n const allTypeRecords = [...importances];\n // sorting: most important types go first\n allTypeRecords.sort(([, i1], [, i2]) => i2 - i1);\n for (const [typeName] of allTypeRecords) {\n if (typeName.endsWith('@1') || numberOfRecordsWithType.get(typeName) === values.length)\n mainTypes.push(typeName);\n else\n secondaryTypes.push(typeName);\n }\n const calculate = (includedTypes, force = false) => {\n const result = [];\n for (let i = 0; i < enrichedRecords.length; i++) {\n const r = enrichedRecords[i];\n const includedTrace = r.fullTrace\n .filter((fm) => includedTypes.has(fm.fullType)\n || (forceTraceElements && forceTraceElements.has(fm.type)));\n if (includedTrace.length === 0) {\n if (force)\n result.push({\n label: 'Unlabeled',\n value: r.value,\n });\n else\n return undefined;\n }\n const labelSet = includedTrace\n .map((fm) => fm.label);\n const sep = ops.separator ?? ' / ';\n result.push({\n label: labelSet.join(sep),\n value: r.value,\n });\n }\n return result;\n };\n const countUniqueLabels = (result) => result === undefined ? 0 : new Set(result.map((c) => c.label)).size;\n // Post-processing: try removing types one by one (lowest importance first) to minimize the label set\n // Accepts removal if it doesn't decrease the number of unique labels (cardinality)\n const minimizeTypeSet = (typeSet) => {\n const initialResult = calculate(typeSet);\n if (initialResult === undefined) {\n return typeSet;\n }\n const currentCardinality = countUniqueLabels(initialResult);\n // Get types sorted by importance ascending (lowest first), excluding forced elements\n const removableSorted = [...typeSet]\n .filter((t) => !forceTraceElements?.has(t.split('@')[0])\n && !(ops.includeNativeLabel && t === LabelTypeFull))\n .sort((a, b) => (importances.get(a) ?? 0) - (importances.get(b) ?? 
0));\n for (const typeToRemove of removableSorted) {\n const reducedSet = new Set(typeSet);\n reducedSet.delete(typeToRemove);\n const candidateResult = calculate(reducedSet);\n if (candidateResult !== undefined && countUniqueLabels(candidateResult) >= currentCardinality) {\n typeSet.delete(typeToRemove);\n }\n }\n return typeSet;\n };\n if (mainTypes.length === 0) {\n if (secondaryTypes.length !== 0)\n throw new Error('Non-empty secondary types list while main types list is empty.');\n return calculate(new Set(LabelTypeFull), true);\n }\n //\n // includedTypes = 2\n // * *\n // T0 T1 T2 T3 T4 T5\n // *\n // additionalType = 3\n //\n // Resulting set: T0, T1, T3\n //\n let includedTypes = 0;\n let additionalType = -1;\n while (includedTypes < mainTypes.length) {\n const currentSet = new Set();\n if (ops.includeNativeLabel)\n currentSet.add(LabelTypeFull);\n for (let i = 0; i < includedTypes; ++i)\n currentSet.add(mainTypes[i]);\n if (additionalType >= 0)\n currentSet.add(mainTypes[additionalType]);\n const candidateResult = calculate(currentSet);\n if (candidateResult !== undefined && countUniqueLabels(candidateResult) === values.length) {\n minimizeTypeSet(currentSet);\n return calculate(currentSet);\n }\n additionalType++;\n if (additionalType >= mainTypes.length) {\n includedTypes++;\n additionalType = includedTypes;\n }\n }\n // Fallback: include all types, then try to minimize\n const fallbackSet = new Set([...mainTypes, ...secondaryTypes]);\n minimizeTypeSet(fallbackSet);\n return calculate(fallbackSet, true);\n }\n\n const PCD_PREFIX = 'PColumnData/';\n const RT_RESOURCE_MAP = PCD_PREFIX + 'ResourceMap';\n const RT_RESOURCE_MAP_PARTITIONED = PCD_PREFIX + 'Partitioned/ResourceMap';\n const RT_JSON_PARTITIONED = PCD_PREFIX + 'JsonPartitioned';\n const RT_BINARY_PARTITIONED = PCD_PREFIX + 'BinaryPartitioned';\n const RT_PARQUET_PARTITIONED = PCD_PREFIX + 'ParquetPartitioned';\n const PCD_SUP_PREFIX = PCD_PREFIX + 'Partitioned/';\n const RT_JSON_SUPER_PARTITIONED = PCD_SUP_PREFIX + 'JsonPartitioned';\n const RT_BINARY_SUPER_PARTITIONED = PCD_SUP_PREFIX + 'BinaryPartitioned';\n const RT_PARQUET_SUPER_PARTITIONED = PCD_SUP_PREFIX + 'ParquetPartitioned';\n const removeIndexSuffix = (keyStr) => {\n if (keyStr.endsWith('.index')) {\n return { baseKey: keyStr.substring(0, keyStr.length - 6), type: 'index' };\n }\n else if (keyStr.endsWith('.values')) {\n return { baseKey: keyStr.substring(0, keyStr.length - 7), type: 'values' };\n }\n else {\n throw new Error(`key must ends on .index/.values for binary p-column, got: ${keyStr}`);\n }\n };\n // @TODO define a class with various resource map operations\n /** Returns a list of all partition keys appeared in the p-column */\n function getPartitionKeysList(acc) {\n if (!acc)\n return undefined;\n const rt = acc.resourceType.name;\n const meta = acc.getDataAsJson();\n const data = [];\n let keyLength = 0;\n // @TODO validate meta shape\n switch (rt) {\n case RT_RESOURCE_MAP:\n keyLength = meta['keyLength'];\n break;\n case RT_RESOURCE_MAP_PARTITIONED:\n keyLength = meta['partitionKeyLength'] + meta['keyLength'];\n break;\n case RT_JSON_PARTITIONED:\n case RT_BINARY_PARTITIONED:\n case RT_PARQUET_PARTITIONED:\n keyLength = meta['partitionKeyLength'];\n break;\n case RT_BINARY_SUPER_PARTITIONED:\n case RT_JSON_SUPER_PARTITIONED:\n case RT_PARQUET_SUPER_PARTITIONED:\n keyLength = meta['superPartitionKeyLength'] + meta['partitionKeyLength'];\n break;\n }\n switch (rt) {\n case RT_RESOURCE_MAP:\n case RT_JSON_PARTITIONED:\n case RT_BINARY_PARTITIONED:\n 
case RT_PARQUET_PARTITIONED:\n for (let keyStr of acc.listInputFields()) {\n if (rt === RT_BINARY_PARTITIONED) {\n keyStr = removeIndexSuffix(keyStr).baseKey;\n }\n const key = [...JSON.parse(keyStr)];\n data.push(key);\n }\n break;\n case RT_RESOURCE_MAP_PARTITIONED:\n case RT_BINARY_SUPER_PARTITIONED:\n case RT_JSON_SUPER_PARTITIONED:\n case RT_PARQUET_SUPER_PARTITIONED:\n for (const supKeyStr of acc.listInputFields()) {\n const keyPrefix = [...JSON.parse(supKeyStr)];\n const value = acc.resolve({ field: supKeyStr, assertFieldType: 'Input' });\n if (value !== undefined) {\n for (let keyStr of value.listInputFields()) {\n if (rt === RT_BINARY_SUPER_PARTITIONED) {\n keyStr = removeIndexSuffix(keyStr).baseKey;\n }\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n data.push(key);\n }\n }\n }\n break;\n }\n return { data, keyLength };\n }\n function getUniquePartitionKeysForDataEntries(list) {\n if (list.type !== 'JsonPartitioned' && list.type !== 'BinaryPartitioned' && list.type !== 'ParquetPartitioned')\n throw new Error(`Splitting requires Partitioned DataInfoEntries, got ${list.type}`);\n const { parts, partitionKeyLength } = list;\n const result = [];\n for (let i = 0; i < partitionKeyLength; ++i) {\n result.push(new Set());\n }\n for (const part of parts) {\n const key = part.key;\n if (key.length !== partitionKeyLength) {\n throw new Error(`Key length (${key.length}) does not match partition length (${partitionKeyLength}) for key: ${JSON.stringify(key)}`);\n }\n for (let i = 0; i < partitionKeyLength; ++i) {\n result[i].add(key[i]);\n }\n }\n return result.map((s) => Array.from(s.values()));\n }\n function getUniquePartitionKeys(acc) {\n if (acc === undefined)\n return undefined;\n if (isDataInfoEntries(acc))\n return getUniquePartitionKeysForDataEntries(acc);\n const list = getPartitionKeysList(acc);\n if (!list)\n return undefined;\n const { data, keyLength } = list;\n const result = [];\n for (let i = 0; i < keyLength; ++i) {\n result.push(new Set());\n }\n for (const l of data) {\n if (l.length !== keyLength) {\n throw new Error('key length does not match partition length');\n }\n for (let i = 0; i < keyLength; ++i) {\n result[i].add(l[i]);\n }\n }\n return result.map((s) => Array.from(s.values()));\n }\n /**\n * Parses the PColumn data from a TreeNodeAccessor into a DataInfoEntries structure.\n * Returns undefined if any required data is missing.\n * Throws error on validation failures.\n *\n * @param acc - The TreeNodeAccessor containing PColumn data\n * @param keyPrefix - Optional key prefix for recursive calls\n * @returns DataInfoEntries representation of the PColumn data, or undefined if incomplete\n */\n function parsePColumnData(acc, keyPrefix = []) {\n if (acc === undefined)\n return undefined;\n if (!acc.getIsReadyOrError())\n return undefined;\n const resourceType = acc.resourceType.name;\n const meta = acc.getDataAsJson();\n // Prevent recursive super-partitioned resources\n if (keyPrefix.length > 0\n && (resourceType === RT_JSON_SUPER_PARTITIONED\n || resourceType === RT_BINARY_SUPER_PARTITIONED\n || resourceType === RT_PARQUET_SUPER_PARTITIONED)) {\n throw new Error(`Unexpected nested super-partitioned resource: ${resourceType}`);\n }\n switch (resourceType) {\n case RT_RESOURCE_MAP:\n case RT_RESOURCE_MAP_PARTITIONED:\n throw new Error(`Only data columns are supported, got: ${resourceType}`);\n case RT_JSON_PARTITIONED: {\n if (typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n 
const parts = [];\n for (const keyStr of acc.listInputFields()) {\n const value = acc.resolve({ field: keyStr, assertFieldType: 'Input' });\n if (value === undefined)\n return undefined;\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n parts.push({ key, value });\n }\n return {\n type: 'JsonPartitioned',\n partitionKeyLength: meta.partitionKeyLength,\n parts,\n };\n }\n case RT_BINARY_PARTITIONED: {\n if (typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n const parts = [];\n const baseKeys = new Map();\n // Group fields by base key (without .index/.values suffix)\n for (const keyStr of acc.listInputFields()) {\n const suffix = removeIndexSuffix(keyStr);\n const value = acc.resolve({ field: keyStr, assertFieldType: 'Input' });\n if (value === undefined)\n return undefined;\n let entry = baseKeys.get(suffix.baseKey);\n if (!entry) {\n entry = {};\n baseKeys.set(suffix.baseKey, entry);\n }\n if (suffix.type === 'index') {\n entry.index = value;\n }\n else {\n entry.values = value;\n }\n }\n // Process complete binary chunks only\n for (const [baseKeyStr, entry] of baseKeys.entries()) {\n if (!entry.index || !entry.values)\n return undefined;\n const key = [...keyPrefix, ...JSON.parse(baseKeyStr)];\n parts.push({\n key,\n value: {\n index: entry.index,\n values: entry.values,\n },\n });\n }\n return {\n type: 'BinaryPartitioned',\n partitionKeyLength: meta.partitionKeyLength,\n parts,\n };\n }\n case RT_PARQUET_PARTITIONED: {\n if (typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing partitionKeyLength in metadata for ${resourceType}`);\n }\n const parts = [];\n for (const keyStr of acc.listInputFields()) {\n const value = acc.resolve({ field: keyStr, assertFieldType: 'Input' });\n if (value === undefined)\n return undefined;\n const key = [...keyPrefix, ...JSON.parse(keyStr)];\n parts.push({ key, value });\n }\n return {\n type: 'ParquetPartitioned',\n partitionKeyLength: meta.partitionKeyLength,\n parts,\n };\n }\n case RT_JSON_SUPER_PARTITIONED: {\n if (typeof meta?.superPartitionKeyLength !== 'number'\n || typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n // Process all super partitions\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: 'Input' });\n if (superPartition === undefined)\n return undefined;\n // Validate inner type\n if (superPartition.resourceType.name !== RT_JSON_PARTITIONED) {\n throw new Error(`Expected ${RT_JSON_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === undefined)\n return undefined;\n if (innerResult.type !== 'JsonPartitioned')\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: 'JsonPartitioned',\n partitionKeyLength: totalKeyLength,\n parts,\n };\n }\n case RT_BINARY_SUPER_PARTITIONED: {\n if (typeof meta?.superPartitionKeyLength !== 'number'\n || typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = 
meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n // Process all super partitions\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: 'Input' });\n if (superPartition === undefined)\n return undefined;\n // Validate inner type\n if (superPartition.resourceType.name !== RT_BINARY_PARTITIONED) {\n throw new Error(`Expected ${RT_BINARY_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === undefined)\n return undefined;\n if (innerResult.type !== 'BinaryPartitioned')\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: 'BinaryPartitioned',\n partitionKeyLength: totalKeyLength,\n parts,\n };\n }\n case RT_PARQUET_SUPER_PARTITIONED: {\n if (typeof meta?.superPartitionKeyLength !== 'number'\n || typeof meta?.partitionKeyLength !== 'number') {\n throw new Error(`Missing superPartitionKeyLength or partitionKeyLength in metadata for ${resourceType}`);\n }\n const totalKeyLength = meta.superPartitionKeyLength + meta.partitionKeyLength;\n const parts = [];\n // Process all super partitions\n for (const supKeyStr of acc.listInputFields()) {\n const superPartition = acc.resolve({ field: supKeyStr, assertFieldType: 'Input' });\n if (superPartition === undefined)\n return undefined;\n // Validate inner type\n if (superPartition.resourceType.name !== RT_PARQUET_PARTITIONED) {\n throw new Error(`Expected ${RT_PARQUET_PARTITIONED} inside ${resourceType}, but got ${superPartition.resourceType.name}`);\n }\n const innerResult = parsePColumnData(superPartition, JSON.parse(supKeyStr));\n if (innerResult === undefined)\n return undefined;\n if (innerResult.type !== 'ParquetPartitioned')\n throw new Error(`Unexpected inner result type for ${resourceType}: ${innerResult.type}`);\n parts.push(...innerResult.parts);\n }\n return {\n type: 'ParquetPartitioned',\n partitionKeyLength: totalKeyLength,\n parts,\n };\n }\n default:\n throw new Error(`Unknown resource type: ${resourceType}`);\n }\n }\n /**\n * Converts or parses the input into DataInfoEntries format.\n\n * @param acc - The input data, which can be TreeNodeAccessor, DataInfoEntries, DataInfo, or undefined.\n * @returns The data in DataInfoEntries format, or undefined if the input was undefined or data is not ready.\n */\n function convertOrParsePColumnData(acc) {\n if (acc === undefined)\n return undefined;\n if (isDataInfoEntries(acc))\n return acc;\n if (isDataInfo(acc))\n return dataInfoToEntries(acc);\n if (acc instanceof TreeNodeAccessor)\n return parsePColumnData(acc);\n throw new Error(`Unexpected input type: ${typeof acc}`);\n }\n function isPColumnReady(c) {\n const isValues = (d) => Array.isArray(d);\n const isAccessor = (d) => d instanceof TreeNodeAccessor;\n let ready = true;\n const data = typeof c.data === 'function' ? 
c.data() : c.data;\n if (data == null) {\n return false;\n }\n else if (isAccessor(data)) {\n ready &&= data.getIsReadyOrError();\n }\n else if (isDataInfo(data)) {\n visitDataInfo(data, (v) => ready &&= v.getIsReadyOrError());\n }\n else if (!isValues(data)) {\n // eslint-disable-next-line @typescript-eslint/restrict-template-expressions\n throw Error(`unsupported column data type: ${data}`);\n }\n return ready;\n }\n function allPColumnsReady(columns) {\n return columns.every(isPColumnReady);\n }\n\n function isPColumnValues(value) {\n if (!Array.isArray(value))\n return false;\n if (value.length === 0)\n return true;\n const first = value[0];\n return typeof first === 'object' && first !== null && 'key' in first && 'val' in first;\n }\n /**\n * A simple implementation of {@link ColumnProvider} backed by a pre-defined array of columns.\n */\n class ArrayColumnProvider {\n columns;\n constructor(columns) {\n this.columns = columns;\n }\n selectColumns(selectors) {\n const predicate = typeof selectors === 'function' ? selectors : selectorsToPredicate(selectors);\n // Filter based on spec, ignoring data type for now\n return this.columns.filter((column) => predicate(column.spec));\n }\n }\n function splitFiltersToTrace(splitFilters) {\n if (!splitFilters)\n return undefined;\n return splitFilters.map((filter) => ({\n type: `split:${canonicalizeAxisId(filter.axisId)}`,\n label: filter.label,\n importance: 1_000_000, // High importance for split filters in labels\n }));\n }\n function splitFiltersToAxisFilter(splitFilters) {\n if (!splitFilters)\n return undefined;\n return splitFilters.map((filter) => [filter.axisIdx, filter.value]);\n }\n function fallbackIdDeriver(originalId, axisFilters) {\n if (!axisFilters || axisFilters.length === 0)\n return originalId;\n const filtersToCanonicalize = [...axisFilters].sort((a, b) => a[0] - b[0]);\n return canonicalize({ id: originalId, axisFilters: filtersToCanonicalize });\n }\n /** Checks if a selector object uses any anchor properties */\n function hasAnchors(selector) {\n if (!selector || typeof selector !== 'object')\n return false;\n const potentialAnchored = selector;\n const domainHasAnchors = potentialAnchored['domain'] && typeof potentialAnchored['domain'] === 'object' && Object.values(potentialAnchored['domain']).some((v) => typeof v === 'object' && v !== null && 'anchor' in v);\n const axesHaveAnchors = potentialAnchored['axes'] && Array.isArray(potentialAnchored['axes']) && potentialAnchored['axes'].some((a) => typeof a === 'object' && a !== null && 'anchor' in a);\n return !!potentialAnchored['domainAnchor'] || domainHasAnchors || axesHaveAnchors;\n }\n /**\n * Derives the indices of axes marked for splitting based on the selector.\n * Throws an error if splitting is requested alongside `partialAxesMatch`.\n */\n function getSplitAxisIndices(selector) {\n if (typeof selector !== 'object' || !('axes' in selector) || selector.axes === undefined) {\n return []; // No axes specified or not an object selector, no splitting\n }\n const splitIndices = selector.axes\n .map((axis, index) => (typeof axis === 'object' && 'split' in axis && axis.split === true) ? 
index : -1)\n .filter((index) => index !== -1);\n if (splitIndices.length > 0 && selector.partialAxesMatch !== undefined) {\n throw new Error('Axis splitting is not supported when `partialAxesMatch` is defined.');\n }\n splitIndices.sort((a, b) => a - b);\n return splitIndices;\n }\n class PColumnCollection {\n defaultProviderStore = [];\n providers = [new ArrayColumnProvider(this.defaultProviderStore)];\n axisLabelProviders = [];\n constructor() { }\n addColumnProvider(provider) {\n this.providers.push(provider);\n return this;\n }\n addAxisLabelProvider(provider) {\n this.axisLabelProviders.push(provider);\n return this;\n }\n addColumns(columns) {\n this.defaultProviderStore.push(...columns);\n return this;\n }\n addColumn(column) {\n this.defaultProviderStore.push(column);\n return this;\n }\n /** Fetches labels for a given axis from the registered providers */\n findLabels(axis) {\n for (const provider of this.axisLabelProviders) {\n const labels = provider.findLabels(axis);\n if (labels)\n return labels; // First provider wins\n }\n return undefined;\n }\n getUniversalEntries(predicateOrSelectors, opts) {\n const { anchorCtx, labelOps: rawLabelOps, dontWaitAllData = false, overrideLabelAnnotation = false, exclude, enrichByLinkers = false } = opts ?? {};\n const labelOps = {\n ...(overrideLabelAnnotation && rawLabelOps?.includeNativeLabel !== false ? { includeNativeLabel: true } : {}),\n ...(rawLabelOps ?? {}),\n };\n let excludePredicate = () => false;\n if (exclude) {\n const excludePredicartes = (Array.isArray(exclude) ? exclude : [exclude])\n .map((selector) => {\n if (hasAnchors(selector)) {\n if (!anchorCtx)\n throw new Error('Anchored selectors in exclude require an AnchoredIdDeriver to be provided in options.');\n return selectorsToPredicate(resolveAnchors(anchorCtx.anchors, selector, opts));\n }\n else\n return selectorsToPredicate(selector);\n });\n excludePredicate = (spec) => excludePredicartes.some((predicate) => predicate(spec));\n }\n const selectorsArray = typeof predicateOrSelectors === 'function'\n ? [predicateOrSelectors]\n : Array.isArray(predicateOrSelectors)\n ? 
predicateOrSelectors\n : [predicateOrSelectors];\n const intermediateResults = [];\n const selectedNativeIds = new Set();\n for (const rawSelector of selectorsArray) {\n const usesAnchors = hasAnchors(rawSelector);\n let currentSelector;\n if (usesAnchors) {\n if (!anchorCtx)\n throw new Error('Anchored selectors require an AnchoredIdDeriver to be provided in options.');\n currentSelector = resolveAnchors(anchorCtx.anchors, rawSelector, opts);\n }\n else\n currentSelector = rawSelector;\n const selectedIds = new Set();\n const selectedColumns = [];\n for (const provider of this.providers) {\n const providerColumns = provider.selectColumns(currentSelector);\n for (const col of providerColumns) {\n if (excludePredicate(col.spec))\n continue;\n if (selectedIds.has(col.id))\n throw new Error(`Duplicate column id ${col.id} in provider ${provider.constructor.name}`);\n const nativeId = deriveNativeId(col.spec);\n if (selectedNativeIds.has(nativeId))\n continue;\n selectedIds.add(col.id);\n selectedNativeIds.add(nativeId);\n selectedColumns.push(col);\n }\n }\n if (selectedColumns.length === 0)\n continue;\n const splitAxisIdxs = getSplitAxisIndices(rawSelector);\n const needsSplitting = splitAxisIdxs.length > 0;\n for (const column of selectedColumns) {\n if (!isPColumnSpec(column.spec))\n continue;\n const originalSpec = column.spec;\n if (needsSplitting) {\n if (isPColumnValues(column.data))\n throw new Error(`Splitting is not supported for PColumns with PColumnValues data format. Column id: ${column.id}`);\n const dataEntries = convertOrParsePColumnData(column.data);\n if (!dataEntries) {\n if (dontWaitAllData)\n continue;\n return undefined;\n }\n if (!isPartitionedDataInfoEntries(dataEntries))\n throw new Error(`Splitting requires Partitioned DataInfoEntries, but parsing resulted in ${dataEntries.type} for column ${column.id}`);\n const uniqueKeys = getUniquePartitionKeys(dataEntries);\n const maxSplitIdx = splitAxisIdxs[splitAxisIdxs.length - 1];\n if (maxSplitIdx >= dataEntries.partitionKeyLength)\n throw new Error(`Not enough partition keys (${dataEntries.partitionKeyLength}) for requested split axes (max index ${maxSplitIdx}) in column ${originalSpec.name}`);\n const axesLabels = splitAxisIdxs\n .map((idx) => this.findLabels(getAxisId(originalSpec.axesSpec[idx])));\n const keyCombinations = [];\n const generateCombinations = (currentCombo, sAxisIdx) => {\n if (sAxisIdx >= splitAxisIdxs.length) {\n keyCombinations.push([...currentCombo]);\n if (keyCombinations.length > 10000)\n throw new Error('Too many key combinations, aborting.');\n return;\n }\n const axisIdx = splitAxisIdxs[sAxisIdx];\n if (axisIdx >= uniqueKeys.length)\n throw new Error(`Axis index ${axisIdx} out of bounds for unique keys array (length ${uniqueKeys.length}) during split key generation for column ${column.id}`);\n const axisValues = uniqueKeys[axisIdx];\n if (!axisValues || axisValues.length === 0) {\n keyCombinations.length = 0; // No combinations possible if one axis has no keys\n return;\n }\n for (const val of axisValues) {\n currentCombo.push(val);\n generateCombinations(currentCombo, sAxisIdx + 1);\n currentCombo.pop();\n }\n };\n generateCombinations([], 0);\n if (keyCombinations.length === 0)\n continue;\n const newAxesSpec = [...originalSpec.axesSpec];\n const splitAxisOriginalIdxs = splitAxisIdxs.map((idx) => idx); // Keep original indices for axisId lookup\n // Remove axes in reverse order to maintain correct indices during removal\n for (let i = splitAxisIdxs.length - 1; i >= 0; i--) {\n 
newAxesSpec.splice(splitAxisIdxs[i], 1);\n }\n const adjustedSpec = { ...originalSpec, axesSpec: newAxesSpec };\n for (const keyCombo of keyCombinations) {\n const splitFilters = keyCombo.map((value, sAxisIdx) => {\n const axisIdx = splitAxisOriginalIdxs[sAxisIdx]; // Use original index for lookup\n const axisId = getAxisId(originalSpec.axesSpec[axisIdx]);\n const axisLabelMap = axesLabels[sAxisIdx];\n const label = axisLabelMap?.[value] ?? String(value);\n return { axisIdx, axisId, value: value, label };\n });\n intermediateResults.push({\n type: 'split',\n originalColumn: column,\n spec: originalSpec,\n adjustedSpec,\n dataEntries,\n axisFilters: splitFilters,\n });\n }\n }\n else {\n intermediateResults.push({\n type: 'direct',\n originalColumn: column,\n spec: originalSpec,\n adjustedSpec: originalSpec,\n });\n }\n }\n }\n if (intermediateResults.length === 0)\n return [];\n const labeledResults = deriveLabels(intermediateResults, (entry) => ({\n spec: entry.spec,\n suffixTrace: entry.type === 'split' ? splitFiltersToTrace(entry.axisFilters) : undefined,\n }), labelOps);\n const result = [];\n for (const { value: entry, label } of labeledResults) {\n const { originalColumn, spec: originalSpec } = entry;\n const axisFilters = entry.type === 'split' ? entry.axisFilters : undefined;\n const axisFiltersTuple = splitFiltersToAxisFilter(axisFilters);\n let finalId;\n if (anchorCtx)\n finalId = anchorCtx.deriveS(originalSpec, axisFiltersTuple);\n else\n finalId = fallbackIdDeriver(originalColumn.id, axisFiltersTuple);\n let finalSpec = { ...entry.adjustedSpec };\n if (overrideLabelAnnotation) {\n finalSpec = {\n ...finalSpec,\n annotations: {\n ...(finalSpec.annotations ?? {}),\n [Annotation.Label]: label,\n },\n };\n }\n result.push({\n id: finalId,\n spec: finalSpec,\n data: () => entry.type === 'split'\n ? entriesToDataInfo(filterDataInfoEntries(entry.dataEntries, axisFiltersTuple))\n : entry.originalColumn.data,\n label: label,\n });\n }\n const ids = new Set(result.map((entry) => entry.id));\n if (enrichByLinkers && anchorCtx) {\n const linkers = result.filter((entry) => isLinkerColumn(entry.spec));\n if (linkers.length === 0) {\n return result;\n }\n const anchorAxes = Object.values(anchorCtx.anchors).flatMap((anchor) => anchor.axesSpec);\n const linkerMap = LinkerMap.fromColumns(linkers.map(getColumnIdAndSpec));\n // loose way of matching\n function matchAxisIdFn(linkerKeyId, sourceAxisId) {\n return matchAxisId(linkerKeyId, sourceAxisId) || matchAxisId(sourceAxisId, linkerKeyId);\n }\n // search all axes that can be reached by linkers from anchor axes; anchor axes are not in this list;\n const availableByLinkersAxes = linkerMap.getReachableByLinkersAxesFromAxes(anchorAxes, matchAxisIdFn);\n // search all columns that includes at least one of additional axes;\n const availableByLinkersColumns = this.getUniversalEntries((spec) => !isLinkerColumn(spec) && spec.axesSpec.some((columnAxisSpec) => {\n const columnAxisId = getAxisId(columnAxisSpec);\n return availableByLinkersAxes.some((axis) => matchAxisIdFn(getAxisId(axis), columnAxisId));\n }), { anchorCtx, labelOps, dontWaitAllData, overrideLabelAnnotation, exclude });\n if (availableByLinkersColumns) {\n result.push(...availableByLinkersColumns.filter((entry) => !ids.has(entry.id)));\n }\n }\n return result;\n }\n getColumns(predicateOrSelectors, opts) {\n const entries = this.getUniversalEntries(predicateOrSelectors, {\n overrideLabelAnnotation: true, // default for getColumns\n ...(opts ?? 
{}),\n });\n if (!entries)\n return undefined;\n const columns = [];\n for (const entry of entries) {\n const data = entry.data();\n if (!data) {\n if (opts?.dontWaitAllData)\n continue;\n return undefined;\n }\n columns.push({\n id: entry.id,\n spec: entry.spec,\n data,\n });\n }\n return columns;\n }\n }\n\n function patchInSetFilters(filters) {\n const inSetToOrEqual = (predicate) => {\n if (predicate.operator !== 'InSet')\n return predicate;\n return {\n operator: 'Or',\n operands: predicate.references.map((reference) => ({\n operator: 'Equal',\n reference,\n })),\n };\n };\n const mapSingleValuePredicate = (filter, cb) => {\n const operator = filter.operator;\n switch (operator) {\n case 'And':\n return {\n ...filter,\n operands: filter.operands.map((operand) => mapSingleValuePredicate(operand, cb)),\n };\n case 'Or':\n return {\n ...filter,\n operands: filter.operands.map((operand) => mapSingleValuePredicate(operand, cb)),\n };\n case 'Not':\n return {\n ...filter,\n operand: mapSingleValuePredicate(filter.operand, cb),\n };\n default:\n return cb(filter);\n }\n };\n const mapFilter = (filter, cb) => {\n return {\n ...filter,\n predicate: mapSingleValuePredicate(filter.predicate, cb),\n };\n };\n return filters.map((filter) => mapFilter(filter, inSetToOrEqual));\n }\n\n /**\n * Helper function to match domain objects\n * @param query Optional domain to match against\n * @param target Optional domain to match\n * @returns true if domains match, false otherwise\n */\n function matchDomain(query, target) {\n if (query === undefined)\n return target === undefined;\n if (target === undefined)\n return true;\n for (const k in target) {\n if (query[k] !== target[k])\n return false;\n }\n return true;\n }\n /**\n * Transforms PColumn data into the internal representation expected by the platform\n * @param data Data from a PColumn to transform\n * @returns Transformed data compatible with platform API\n */\n function transformPColumnData(data) {\n return mapPObjectData(data, (d) => {\n if (d instanceof TreeNodeAccessor) {\n return d.handle;\n }\n else if (isDataInfo(d)) {\n return mapDataInfo(d, (accessor) => accessor.handle);\n }\n else {\n return d;\n }\n });\n }\n class ResultPool {\n ctx = getCfgRenderCtx();\n /**\n * @deprecated use getOptions()\n */\n calculateOptions(predicate) {\n return this.ctx.calculateOptions(predicate);\n }\n getOptions(predicateOrSelector, opts) {\n const predicate = typeof predicateOrSelector === 'function'\n ? predicateOrSelector\n : selectorsToPredicate(predicateOrSelector);\n const filtered = this.getSpecs().entries.filter((s) => predicate(s.obj));\n let labelOps = {};\n let refsWithEnrichments = false;\n if (typeof opts !== 'undefined') {\n if (typeof opts === 'function') {\n labelOps = opts;\n }\n else if (typeof opts === 'object') {\n if ('includeNativeLabel' in opts || 'separator' in opts || 'addLabelAsSuffix' in opts) {\n labelOps = opts;\n }\n else {\n opts = opts;\n labelOps = opts.label ?? {};\n refsWithEnrichments = opts.refsWithEnrichments ?? false;\n }\n }\n }\n if (typeof labelOps === 'object')\n return deriveLabels(filtered, (o) => o.obj, labelOps ?? 
{}).map(({ value: { ref }, label }) => ({\n ref: withEnrichments(ref, refsWithEnrichments),\n label,\n }));\n else\n return filtered.map(({ ref, obj }) => ({\n ref: withEnrichments(ref, refsWithEnrichments),\n label: labelOps(obj, ref),\n }));\n }\n resolveAnchorCtx(anchorsOrCtx) {\n if (anchorsOrCtx instanceof AnchoredIdDeriver)\n return anchorsOrCtx;\n const resolvedAnchors = {};\n for (const [key, value] of Object.entries(anchorsOrCtx)) {\n if (isPlRef(value)) {\n const resolvedSpec = this.getPColumnSpecByRef(value);\n if (!resolvedSpec)\n return undefined;\n resolvedAnchors[key] = resolvedSpec;\n }\n else {\n resolvedAnchors[key] = value;\n }\n }\n return new AnchoredIdDeriver(resolvedAnchors);\n }\n /**\n * Returns columns that match the provided anchors and selectors. It applies axis filters and label derivation.\n *\n * @param anchorsOrCtx - Anchor context for column selection (same as in getCanonicalOptions)\n * @param predicateOrSelectors - Predicate or selectors for filtering columns (same as in getCanonicalOptions)\n * @param opts - Optional configuration for label generation and data waiting\n * @returns A PFrameHandle for the created PFrame, or undefined if any required data is missing\n */\n getAnchoredPColumns(anchorsOrCtx, predicateOrSelectors, opts) {\n const anchorCtx = this.resolveAnchorCtx(anchorsOrCtx);\n if (!anchorCtx)\n return undefined;\n return new PColumnCollection()\n .addColumnProvider(this)\n .addAxisLabelProvider(this)\n .getColumns(predicateOrSelectors, {\n ...opts,\n anchorCtx,\n });\n }\n /**\n * Calculates anchored identifier options for columns matching a given predicate and returns their\n * canonicalized representations.\n *\n * This function filters column specifications from the result pool that match the provided predicate,\n * creates a standardized AnchorCtx from the provided anchors, and generates a list of label-value\n * pairs for UI components (like dropdowns).\n *\n * @param anchorsOrCtx - Either:\n * - An existing AnchorCtx instance\n * - A record mapping anchor IDs to PColumnSpec objects\n * - A record mapping anchor IDs to PlRef objects (which will be resolved to PColumnSpec)\n * @param predicateOrSelectors - Either:\n * - A predicate function that takes a PColumnSpec and returns a boolean.\n * Only specs that return true will be included.\n * - An APColumnSelector object for declarative filtering, which will be\n * resolved against the provided anchors and matched using matchPColumn.\n * - An array of APColumnSelector objects - columns matching ANY selector\n * in the array will be included (OR operation).\n * @param opts - Optional configuration for label generation:\n * - labelOps: Optional configuration for label generation:\n * - includeNativeLabel: Whether to include native column labels\n * - separator: String to use between label parts (defaults to \" / \")\n * - addLabelAsSuffix: Whether to add labels as suffix instead of prefix\n * - dontWaitAllData: Whether to skip columns that don't have all data (if not set, will return undefined,\n * if at least one column that requires splitting is missing data)\n * @returns An array of objects with `label` (display text) and `value` (anchored ID string) properties,\n * or undefined if any PlRef resolution fails.\n */\n getCanonicalOptions(anchorsOrCtx, predicateOrSelectors, opts) {\n const anchorCtx = this.resolveAnchorCtx(anchorsOrCtx);\n if (!anchorCtx)\n return undefined;\n const entries = new PColumnCollection()\n .addColumnProvider(this)\n .addAxisLabelProvider(this)\n 
.getUniversalEntries(predicateOrSelectors, {\n ...opts,\n anchorCtx,\n });\n if (!entries)\n return undefined;\n return entries.map((item) => ({\n value: item.id,\n label: item.label,\n }));\n }\n /**\n * @deprecated use getData()\n */\n getDataFromResultPool() {\n return this.getData();\n }\n getData() {\n const result = this.ctx.getDataFromResultPool();\n return {\n isComplete: result.isComplete,\n entries: result.entries.map((e) => ({\n ref: e.ref,\n obj: {\n ...e.obj,\n data: new TreeNodeAccessor(e.obj.data, [e.ref.blockId, e.ref.name]),\n },\n })),\n };\n }\n /**\n * @deprecated use getDataWithErrors()\n */\n getDataWithErrorsFromResultPool() {\n return this.getDataWithErrors();\n }\n getDataWithErrors() {\n const result = this.ctx.getDataWithErrorsFromResultPool();\n return {\n isComplete: result.isComplete,\n entries: result.entries.map((e) => ({\n ref: e.ref,\n obj: {\n ...e.obj,\n data: mapValueInVOE(e.obj.data, (handle) => new TreeNodeAccessor(handle, [e.ref.blockId, e.ref.name])),\n },\n })),\n };\n }\n /**\n * @deprecated use getSpecs()\n */\n getSpecsFromResultPool() {\n return this.getSpecs();\n }\n getSpecs() {\n return this.ctx.getSpecsFromResultPool();\n }\n /**\n * @param ref a Ref\n * @returns data associated with the ref\n */\n getDataByRef(ref) {\n // @TODO remove after 1 Jan 2025; forward compatibility\n if (typeof this.ctx.getDataFromResultPoolByRef === 'undefined')\n return this.getData().entries.find((f) => f.ref.blockId === ref.blockId && f.ref.name === ref.name)?.obj;\n const data = this.ctx.getDataFromResultPoolByRef(ref.blockId, ref.name); // Keep original call\n // Need to handle undefined case before mapping\n if (!data)\n return undefined;\n return mapPObjectData(data, (handle) => new TreeNodeAccessor(handle, [ref.blockId, ref.name]));\n }\n /**\n * Returns data associated with the ref ensuring that it is a p-column.\n * @param ref a Ref\n * @returns p-column associated with the ref\n */\n getPColumnByRef(ref) {\n const data = this.getDataByRef(ref);\n if (!data)\n return undefined;\n return ensurePColumn(data);\n }\n /**\n * Returns spec associated with the ref ensuring that it is a p-column spec.\n * @param ref a Ref\n * @returns p-column spec associated with the ref\n */\n getPColumnSpecByRef(ref) {\n const spec = this.getSpecByRef(ref);\n if (!spec)\n return undefined;\n if (!isPColumnSpec(spec))\n throw new Error(`not a PColumn spec (kind = ${spec.kind})`);\n return spec;\n }\n /**\n * @param ref a Ref\n * @returns object spec associated with the ref\n */\n getSpecByRef(ref) {\n return this.ctx.getSpecFromResultPoolByRef(ref.blockId, ref.name);\n }\n /**\n * @param spec object specification\n * @returns array of data objects with compatible specs\n * @deprecated delete this method after Jan 1, 2025\n */\n findDataWithCompatibleSpec(spec) {\n const result = [];\n out: for (const data of this.getData().entries) {\n if (!isPColumnSpec(data.obj.spec)) {\n continue;\n }\n const oth = data.obj.spec;\n if (spec.name !== oth.name) {\n continue;\n }\n if (spec.valueType !== oth.valueType) {\n continue;\n }\n if (spec.axesSpec.length !== oth.axesSpec.length) {\n continue;\n }\n if (!matchDomain(spec.domain, oth.domain)) {\n continue;\n }\n for (let i = 0; i < spec.axesSpec.length; ++i) {\n const qAx = spec.axesSpec[i];\n const tAx = oth.axesSpec[i];\n if (qAx.name !== tAx.name) {\n continue out;\n }\n if (qAx.type !== tAx.type) {\n continue out;\n }\n if (!matchDomain(qAx.domain, tAx.domain)) {\n continue out;\n }\n }\n result.push(data.obj);\n }\n return 
result;\n }\n /**\n * Find labels data for a given axis id. It will search for a label column and return its data as a map.\n * @returns a map of axis value => label\n */\n findLabels(axis) {\n const dataPool = this.getData();\n for (const column of dataPool.entries) {\n if (!isPColumn(column.obj))\n continue;\n const spec = column.obj.spec;\n if (spec.name === PColumnName.Label\n && spec.axesSpec.length === 1\n && spec.axesSpec[0].name === axis.name\n && spec.axesSpec[0].type === axis.type\n && matchDomain(axis.domain, spec.axesSpec[0].domain)) {\n if (column.obj.data.resourceType.name !== 'PColumnData/Json') {\n throw Error(`Expected JSON column for labels, got: ${column.obj.data.resourceType.name}`);\n }\n const labels = Object.fromEntries(Object.entries(column.obj.data.getDataAsJson().data).map((e) => [JSON.parse(e[0])[0], e[1]]));\n return labels;\n }\n }\n return undefined;\n }\n /**\n * Selects columns based on the provided selectors, returning PColumn objects\n * with lazily loaded data.\n *\n * @param selectors - A predicate function, a single selector, or an array of selectors.\n * @returns An array of PColumn objects matching the selectors. Data is loaded on first access.\n */\n selectColumns(selectors) {\n const predicate = typeof selectors === 'function' ? selectors : selectorsToPredicate(selectors);\n const matchedSpecs = this.getSpecs().entries.filter(({ obj: spec }) => {\n if (!isPColumnSpec(spec))\n return false;\n return predicate(spec);\n });\n // Map specs to PColumn objects with lazy data loading\n return matchedSpecs.map(({ ref, obj: spec }) => {\n // Type assertion needed because filter ensures it's PColumnSpec\n const pcolumnSpec = spec;\n let _cachedData = null; // Use null to distinguish initial state from undefined result\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this; // Capture 'this' for use inside the getter\n return {\n id: canonicalize(ref),\n spec: pcolumnSpec,\n get data() {\n if (_cachedData !== null) {\n return _cachedData; // Return cached data (could be undefined if fetch failed)\n }\n _cachedData = self.getPColumnByRef(ref)?.data;\n return _cachedData;\n },\n }; // Cast needed because 'data' is a getter\n });\n }\n /**\n * Find labels data for a given axis id of a p-column.\n * @returns a map of axis value => label\n */\n findLabelsForColumnAxis(column, axisIdx) {\n const labels = this.findLabels(column.axesSpec[axisIdx]);\n if (!labels)\n return undefined;\n const axisKeys = readAnnotation(column, `pl7.app/axisKeys/${axisIdx}`);\n if (axisKeys !== undefined) {\n const keys = JSON.parse(axisKeys);\n return Object.fromEntries(keys.map((key) => {\n return [key, labels[key] ?? 'Unlabelled'];\n }));\n }\n else {\n return labels;\n }\n }\n }\n /** Main entry point to the API available within model lambdas (like outputs, sections, etc..) */\n class RenderCtxBase {\n ctx;\n constructor() {\n this.ctx = getCfgRenderCtx();\n }\n _dataCache;\n get data() {\n if (this._dataCache === undefined) {\n const raw = this.ctx.data;\n const value = typeof raw === 'function' ? raw() : raw;\n this._dataCache = { v: value ? JSON.parse(value) : {} };\n }\n return this._dataCache.v;\n }\n // lazy rendering because this feature is rarely used\n _activeArgsCache;\n /**\n * Returns args snapshot the block was executed for (i.e. 
when \"Run\" button was pressed).\n * Returns undefined, if block was never executed or stopped mid-way execution, so that the result was cleared.\n * */\n get activeArgs() {\n if (this._activeArgsCache === undefined) {\n const raw = this.ctx.activeArgs;\n const value = typeof raw === 'function' ? raw() : raw;\n this._activeArgsCache = {\n v: value ? JSON.parse(value) : undefined,\n };\n }\n return this._activeArgsCache.v;\n }\n // /** Can be used to determine features provided by the desktop instance. */\n // public get featureFlags() {\n // return this.ctx.featureFlags;\n // }\n getNamedAccessor(name) {\n return ifDef(this.ctx.getAccessorHandleByName(name), (accessor) => new TreeNodeAccessor(accessor, [name]));\n }\n get prerun() {\n return this.getNamedAccessor(StagingAccessorName);\n }\n get outputs() {\n return this.getNamedAccessor(MainAccessorName);\n }\n resultPool = new ResultPool();\n /**\n * Find labels data for a given axis id. It will search for a label column and return its data as a map.\n * @returns a map of axis value => label\n * @deprecated Use resultPool.findLabels instead\n */\n findLabels(axis) {\n return this.resultPool.findLabels(axis);\n }\n verifyInlineAndExplicitColumnsSupport(columns) {\n const hasInlineColumns = columns.some((c) => !(c.data instanceof TreeNodeAccessor) || isDataInfo(c.data)); // Updated check for DataInfo\n const inlineColumnsSupport = this.ctx.featureFlags?.inlineColumnsSupport === true;\n if (hasInlineColumns && !inlineColumnsSupport)\n throw Error(`Inline or explicit columns not supported`); // Combined check\n // Removed redundant explicitColumns check\n }\n patchPTableDef(def) {\n if (!this.ctx.featureFlags?.pTablePartitionFiltersSupport) {\n // For old desktop move all partition filters to filters field as it doesn't read partitionFilters field\n def = {\n ...def,\n partitionFilters: [],\n filters: [...def.partitionFilters, ...def.filters],\n };\n }\n if (!this.ctx.featureFlags?.pFrameInSetFilterSupport) {\n def = {\n ...def,\n partitionFilters: patchInSetFilters(def.partitionFilters),\n filters: patchInSetFilters(def.filters),\n };\n }\n return def;\n }\n // TODO remove all non-PColumn fields\n createPFrame(def) {\n this.verifyInlineAndExplicitColumnsSupport(def);\n if (!allPColumnsReady(def))\n return undefined;\n return this.ctx.createPFrame(def.map((c) => transformPColumnData(c)));\n }\n createPTable(def) {\n let rawDef;\n if ('columns' in def) {\n rawDef = this.patchPTableDef({\n src: {\n type: 'full',\n entries: def.columns.map((c) => ({ type: 'column', column: c })),\n },\n partitionFilters: def.filters ?? [],\n filters: [],\n sorting: def.sorting ?? 
[],\n });\n }\n else {\n rawDef = this.patchPTableDef(def);\n }\n const columns = extractAllColumns(rawDef.src);\n this.verifyInlineAndExplicitColumnsSupport(columns);\n if (!allPColumnsReady(columns))\n return undefined;\n return this.ctx.createPTable(mapPTableDef(rawDef, (po) => transformPColumnData(po)));\n }\n /** @deprecated scheduled for removal from SDK */\n getBlockLabel(blockId) {\n return this.ctx.getBlockLabel(blockId);\n }\n getCurrentUnstableMarker() {\n return this.ctx.getCurrentUnstableMarker();\n }\n logInfo(msg) {\n this.ctx.logInfo(msg);\n }\n logWarn(msg) {\n this.ctx.logWarn(msg);\n }\n logError(msg) {\n this.ctx.logError(msg);\n }\n }\n /** Render context for legacy v1/v2 blocks - provides backward compatibility */\n class RenderCtxLegacy extends RenderCtxBase {\n _argsCache;\n get args() {\n if (this._argsCache === undefined) {\n const raw = this.ctx.args;\n const value = typeof raw === 'function' ? raw() : raw;\n this._argsCache = { v: JSON.parse(value) };\n }\n return this._argsCache.v;\n }\n _uiStateCache;\n get uiState() {\n if (this._uiStateCache === undefined) {\n const raw = this.ctx.uiState;\n const value = typeof raw === 'function' ? raw() : raw;\n this._uiStateCache = { v: value ? JSON.parse(value) : {} };\n }\n return this._uiStateCache.v;\n }\n }\n\n var version = \"1.53.3\";\n\n const PlatformaSDKVersion = version;\n\n function isConfigLambda(cfgOrFh) {\n return cfgOrFh.__renderLambda === true;\n }\n\n function downgradeCfgOrLambda(data) {\n if (data === undefined)\n return undefined;\n if (isConfigLambda(data))\n return data.handle;\n return data;\n }\n\n /** Main entry point that each block should use in it's \"config\" module. Don't forget\n * to call {@link done()} at the end of configuration. Value returned by this builder must be\n * exported as constant with name \"platforma\" from the \"config\" module. */\n class BlockModel {\n config;\n constructor(config) {\n this.config = config;\n }\n static get INITIAL_BLOCK_FEATURE_FLAGS() {\n return {\n supportsLazyState: true,\n requiresUIAPIVersion: 1,\n requiresModelAPIVersion: 1,\n };\n }\n static create(renderingMode = 'Heavy') {\n return new BlockModel({\n renderingMode,\n initialUiState: {},\n outputs: {},\n inputsValid: getImmediate(true),\n sections: getImmediate([]),\n featureFlags: BlockModel.INITIAL_BLOCK_FEATURE_FLAGS,\n });\n }\n output(key, cfgOrRf, flags = {}) {\n if (typeof cfgOrRf === 'function') {\n const handle = `output#${key}`;\n tryRegisterCallback(handle, () => cfgOrRf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n outputs: {\n ...this.config.outputs,\n [key]: {\n __renderLambda: true,\n handle,\n ...flags,\n },\n },\n });\n }\n else {\n return new BlockModel({\n ...this.config,\n outputs: {\n ...this.config.outputs,\n [key]: cfgOrRf,\n },\n });\n }\n }\n /** Shortcut for {@link output} with retentive flag set to true. */\n retentiveOutput(key, rf) {\n return this.output(key, rf, { retentive: true });\n }\n /** Shortcut for {@link output} with withStatus flag set to true. */\n outputWithStatus(key, rf) {\n return this.output(key, rf, { withStatus: true });\n }\n /** Shortcut for {@link output} with retentive and withStatus flags set to true. 
*/\n retentiveOutputWithStatus(key, rf) {\n return this.output(key, rf, { retentive: true, withStatus: true });\n }\n argsValid(cfgOrRf) {\n if (typeof cfgOrRf === 'function') {\n tryRegisterCallback('inputsValid', () => cfgOrRf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n inputsValid: {\n __renderLambda: true,\n handle: 'inputsValid',\n },\n });\n }\n else {\n return new BlockModel({\n ...this.config,\n inputsValid: cfgOrRf,\n });\n }\n }\n sections(arrOrCfgOrRf) {\n if (Array.isArray(arrOrCfgOrRf)) {\n return this.sections(getImmediate(arrOrCfgOrRf));\n }\n else if (typeof arrOrCfgOrRf === 'function') {\n tryRegisterCallback('sections', () => arrOrCfgOrRf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n sections: {\n __renderLambda: true,\n handle: 'sections',\n },\n });\n }\n else {\n return new BlockModel({\n ...this.config,\n sections: arrOrCfgOrRf,\n });\n }\n }\n /** Sets a rendering function to derive block title, shown for the block in the left blocks-overview panel. */\n title(rf) {\n tryRegisterCallback('title', () => rf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n title: {\n __renderLambda: true,\n handle: 'title',\n },\n });\n }\n subtitle(rf) {\n tryRegisterCallback('subtitle', () => rf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n subtitle: {\n __renderLambda: true,\n handle: 'subtitle',\n },\n });\n }\n tags(rf) {\n tryRegisterCallback('tags', () => rf(new RenderCtxLegacy()));\n return new BlockModel({\n ...this.config,\n tags: {\n __renderLambda: true,\n handle: 'tags',\n },\n });\n }\n /**\n * Sets initial args for the block, this value must be specified.\n * @deprecated use {@link withArgs}\n * */\n initialArgs(value) {\n return this.withArgs(value);\n }\n /** Sets initial args for the block, this value must be specified. */\n withArgs(initialArgs) {\n return new BlockModel({\n ...this.config,\n initialArgs,\n });\n }\n /** Defines type and sets initial value for block UiState. */\n withUiState(initialUiState) {\n return new BlockModel({\n ...this.config,\n initialUiState,\n });\n }\n /** Sets or overrides feature flags for the block. */\n withFeatureFlags(flags) {\n return new BlockModel({\n ...this.config,\n featureFlags: {\n ...this.config.featureFlags,\n ...flags,\n },\n });\n }\n /**\n * Defines how to derive list of upstream references this block is meant to enrich with its exports from block args.\n * Influences dependency graph construction.\n */\n enriches(lambda) {\n tryRegisterCallback('enrichmentTargets', lambda);\n return new BlockModel({\n ...this.config,\n enrichmentTargets: {\n __renderLambda: true,\n handle: 'enrichmentTargets',\n },\n });\n }\n /** Renders all provided block settings into a pre-configured platforma API\n * instance, that can be used in frontend to interact with block state, and\n * other features provided by the platforma to the block. 
*/\n done(apiVersion = 1) {\n return this.withFeatureFlags({\n ...this.config.featureFlags,\n requiresUIAPIVersion: apiVersion,\n }).#done();\n }\n #done() {\n if (this.config.initialArgs === undefined)\n throw new Error('Initial arguments not set.');\n const config = {\n v4: undefined,\n v3: {\n configVersion: 3,\n modelAPIVersion: 1,\n sdkVersion: PlatformaSDKVersion,\n renderingMode: this.config.renderingMode,\n initialArgs: this.config.initialArgs,\n initialUiState: this.config.initialUiState,\n inputsValid: this.config.inputsValid,\n sections: this.config.sections,\n title: this.config.title,\n subtitle: this.config.subtitle,\n tags: this.config.tags,\n outputs: this.config.outputs,\n enrichmentTargets: this.config.enrichmentTargets,\n featureFlags: this.config.featureFlags,\n },\n // fields below are added to allow previous desktop versions read generated configs\n sdkVersion: PlatformaSDKVersion,\n renderingMode: this.config.renderingMode,\n initialArgs: this.config.initialArgs,\n inputsValid: downgradeCfgOrLambda(this.config.inputsValid),\n sections: downgradeCfgOrLambda(this.config.sections),\n outputs: Object.fromEntries(Object.entries(this.config.outputs).map(([key, value]) => [key, downgradeCfgOrLambda(value)])),\n };\n globalThis.platformaApiVersion = this.config.featureFlags.requiresUIAPIVersion;\n if (!isInUI())\n // we are in the configuration rendering routine, not in actual UI\n return { config };\n // normal operation inside the UI\n else\n return {\n ...getPlatformaInstance({ sdkVersion: PlatformaSDKVersion, apiVersion: platformaApiVersion }),\n blockModelInfo: {\n outputs: Object.fromEntries(Object.entries(this.config.outputs)\n .map(([key, value]) => [key, {\n withStatus: Boolean(isConfigLambda(value) && value.withStatus),\n }])),\n },\n };\n }\n }\n\n /**\n * BlockStorage VM Integration - Internal module for VM-based storage operations.\n *\n * This module auto-registers internal callbacks that the middle layer can invoke\n * to perform storage transformations. 
Block developers never interact with these\n * directly - they only see `state`.\n *\n * Registered callbacks (all prefixed with `__pl_` for internal SDK use):\n * - `__pl_storage_applyUpdate`: (currentStorageJson, payload) => updatedStorageJson\n * - `__pl_storage_debugView`: (rawStorage) => JSON string with storage debug view\n * - `__pl_storage_migrate`: (currentStorageJson) => MigrationResult\n * - `__pl_args_derive`: (storageJson) => ArgsDeriveResult\n * - `__pl_prerunArgs_derive`: (storageJson) => ArgsDeriveResult\n *\n * Callbacks registered by DataModel.registerCallbacks():\n * - `__pl_data_initial`: () => initial data\n * - `__pl_data_upgrade`: (versioned) => DataMigrationResult\n * - `__pl_storage_initial`: () => initial BlockStorage as JSON string\n *\n * @module block_storage_vm\n * @internal\n */\n /**\n * Normalizes raw storage data and extracts state.\n * Handles all formats:\n * - New BlockStorage format (has discriminator)\n * - Legacy V1/V2 format ({ args, uiState })\n * - Raw V3 state (any other format)\n *\n * @param rawStorage - Raw data from blockStorage field (may be JSON string or object)\n * @returns Object with normalized storage and extracted state\n */\n function normalizeStorage(rawStorage) {\n // Handle undefined/null\n if (rawStorage === undefined || rawStorage === null) {\n const storage = createBlockStorage({});\n return { storage, data: {} };\n }\n // Parse JSON string if needed\n let parsed = rawStorage;\n if (typeof rawStorage === 'string') {\n try {\n parsed = JSON.parse(rawStorage);\n }\n catch {\n // If parsing fails, treat string as the data\n const storage = createBlockStorage(rawStorage);\n return { storage, data: rawStorage };\n }\n }\n // Check for BlockStorage format (has discriminator)\n if (isBlockStorage(parsed)) {\n const storage = normalizeBlockStorage(parsed);\n return { storage, data: getStorageData(storage) };\n }\n // Check for legacy V1/V2 format: { args, uiState }\n if (isLegacyModelV1ApiFormat(parsed)) {\n // For legacy format, the whole object IS the data\n const storage = createBlockStorage(parsed);\n return { storage, data: parsed };\n }\n // Raw V3 data - wrap it\n const storage = createBlockStorage(parsed);\n return { storage, data: parsed };\n }\n /**\n * Applies a state update to existing storage.\n * Used when setData is called from the frontend.\n *\n * @param currentStorageJson - Current storage as JSON string (must be defined)\n * @param newData - New data from application\n * @returns Updated storage as JSON string\n */\n function applyStorageUpdate(currentStorageJson, payload) {\n const { storage: currentStorage } = normalizeStorage(currentStorageJson);\n // Update data while preserving other storage fields (version, plugins)\n const updatedStorage = updateStorageData(currentStorage, payload);\n return JSON.stringify(updatedStorage);\n }\n /**\n * Checks if data is in legacy Model API v1 format.\n * Legacy format has { args, uiState? 
} at top level without the BlockStorage discriminator.\n */\n function isLegacyModelV1ApiFormat(data) {\n if (data === null || typeof data !== 'object')\n return false;\n if (isBlockStorage(data))\n return false;\n const obj = data;\n return 'args' in obj;\n }\n // =============================================================================\n // Auto-register internal callbacks when module is loaded in VM\n // =============================================================================\n // Register apply update callback (requires existing storage)\n tryRegisterCallback('__pl_storage_applyUpdate', (currentStorageJson, payload) => {\n return applyStorageUpdate(currentStorageJson, payload);\n });\n /**\n * Gets storage debug view from raw storage data.\n * Returns structured debug info about the storage state.\n *\n * @param rawStorage - Raw data from blockStorage field (may be JSON string or object)\n * @returns JSON string with storage debug view\n */\n function getStorageDebugView(rawStorage) {\n const { storage } = normalizeStorage(rawStorage);\n const debugView = {\n dataVersion: storage.__dataVersion,\n data: storage.__data,\n };\n return stringifyJson(debugView);\n }\n // Register debug view callback\n tryRegisterCallback('__pl_storage_debugView', (rawStorage) => {\n return getStorageDebugView(rawStorage);\n });\n /**\n * Runs storage migration using the DataModel's migrate callback.\n * This is the main entry point for the middle layer to trigger migrations.\n *\n * Uses the '__pl_data_upgrade' callback registered by DataModel.registerCallbacks() which:\n * - Handles all migration logic internally\n * - Returns { version, data, warning? } - warning present if reset to initial data\n *\n * @param currentStorageJson - Current storage as JSON string (or undefined)\n * @returns MigrationResult\n */\n function migrateStorage(currentStorageJson) {\n // Get the callback registry context\n const ctx = tryGetCfgRenderCtx();\n if (ctx === undefined) {\n return { error: 'Not in config rendering context' };\n }\n // Normalize storage to get current data and version\n const { storage: currentStorage, data: currentData } = normalizeStorage(currentStorageJson);\n const currentVersion = currentStorage.__dataVersion;\n // Helper to create storage with given data and version\n const createStorageJson = (data, version) => {\n return JSON.stringify({\n ...currentStorage,\n __dataVersion: version,\n __data: data,\n });\n };\n // Get the migrate callback (registered by DataModel.registerCallbacks())\n const migrateCallback = ctx.callbackRegistry['__pl_data_upgrade'];\n if (typeof migrateCallback !== 'function') {\n return { error: '__pl_data_upgrade callback not found (DataModel not registered)' };\n }\n // Call the migrator's migrate function\n let result;\n try {\n result = migrateCallback({ version: currentVersion, data: currentData });\n }\n catch (e) {\n const errorMsg = e instanceof Error ? e.message : String(e);\n return { error: `migrate() threw: ${errorMsg}` };\n }\n // Build info message\n const info = result.version === currentVersion\n ? `No migration needed (${currentVersion})`\n : result.warning\n ? 
`Reset to initial data (${result.version})`\n : `Migrated ${currentVersion}→${result.version}`;\n return {\n newStorageJson: createStorageJson(result.data, result.version),\n info,\n warn: result.warning,\n };\n }\n // Register migrate callback\n tryRegisterCallback('__pl_storage_migrate', (currentStorageJson) => {\n return migrateStorage(currentStorageJson);\n });\n /**\n * Derives args from storage using the registered 'args' callback.\n * This extracts data from storage and passes it to the block's args() function.\n *\n * @param storageJson - Storage as JSON string\n * @returns ArgsDeriveResult with derived args or error\n */\n function deriveArgsFromStorage(storageJson) {\n const ctx = tryGetCfgRenderCtx();\n if (ctx === undefined) {\n return { error: 'Not in config rendering context' };\n }\n // Extract data from storage\n const { data } = normalizeStorage(storageJson);\n // Get the args callback (registered by BlockModelV3.args())\n const argsCallback = ctx.callbackRegistry['args'];\n if (typeof argsCallback !== 'function') {\n return { error: 'args callback not found' };\n }\n // Call the args callback with extracted data\n try {\n const result = argsCallback(data);\n return { value: result };\n }\n catch (e) {\n const errorMsg = e instanceof Error ? e.message : String(e);\n return { error: `args() threw: ${errorMsg}` };\n }\n }\n // Register args derivation callback\n tryRegisterCallback('__pl_args_derive', (storageJson) => {\n return deriveArgsFromStorage(storageJson);\n });\n /**\n * Derives prerunArgs from storage using the registered 'prerunArgs' callback.\n * Falls back to 'args' callback if 'prerunArgs' is not defined.\n *\n * @param storageJson - Storage as JSON string\n * @returns ArgsDeriveResult with derived prerunArgs or error\n */\n function derivePrerunArgsFromStorage(storageJson) {\n const ctx = tryGetCfgRenderCtx();\n if (ctx === undefined) {\n return { error: 'Not in config rendering context' };\n }\n // Extract data from storage\n const { data } = normalizeStorage(storageJson);\n // Try prerunArgs callback first\n const prerunArgsCallback = ctx.callbackRegistry['prerunArgs'];\n if (typeof prerunArgsCallback === 'function') {\n try {\n const result = prerunArgsCallback(data);\n return { value: result };\n }\n catch (e) {\n const errorMsg = e instanceof Error ? e.message : String(e);\n return { error: `prerunArgs() threw: ${errorMsg}` };\n }\n }\n // Fall back to args callback\n const argsCallback = ctx.callbackRegistry['args'];\n if (typeof argsCallback !== 'function') {\n return { error: 'args callback not found (fallback from missing prerunArgs)' };\n }\n try {\n const result = argsCallback(data);\n return { value: result };\n }\n catch (e) {\n const errorMsg = e instanceof Error ? e.message : String(e);\n return { error: `args() threw (fallback): ${errorMsg}` };\n }\n }\n // Register prerunArgs derivation callback\n tryRegisterCallback('__pl_prerunArgs_derive', (storageJson) => {\n return derivePrerunArgsFromStorage(storageJson);\n });\n\n function getAllRelatedColumns(ctx, predicate) {\n // if current block doesn't produce own columns then use all columns from result pool\n const columns = new PColumnCollection();\n columns.addColumnProvider(ctx.resultPool);\n const allColumns = columns.getUniversalEntries(predicate, { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? 
[];\n const allAxes = new Map(allColumns\n .flatMap((column) => getNormalizedAxesList(column.spec.axesSpec))\n .map((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return [canonicalizeJson(axisId), axisSpec];\n }));\n // additional columns are duplicates with extra fields in domains for compatibility if there are ones with partial match\n const extendedColumns = enrichCompatible(allAxes, allColumns);\n return extendedColumns;\n }\n function getRelatedColumns(ctx, { columns: rootColumns, predicate }) {\n // if current block has its own columns then take from result pool only compatible with them\n const columns = new PColumnCollection();\n columns.addColumnProvider(ctx.resultPool);\n columns.addColumns(rootColumns);\n // all possible axes from block columns\n const blockAxes = new Map();\n // axes from block columns and compatible result pool columns\n const allAxes = new Map();\n for (const c of rootColumns) {\n for (const spec of getNormalizedAxesList(c.spec.axesSpec)) {\n const aid = getAxisId(spec);\n blockAxes.set(canonicalizeJson(aid), spec);\n allAxes.set(canonicalizeJson(aid), spec);\n }\n }\n // all linker columns always go to pFrame - even it's impossible to use some of them they all are hidden\n const linkerColumns = columns.getUniversalEntries((spec) => predicate(spec) && isLinkerColumn(spec)) ?? [];\n const availableWithLinkersAxes = getAvailableWithLinkersAxes(linkerColumns, blockAxes);\n // all possible axes from connected linkers\n for (const item of availableWithLinkersAxes) {\n blockAxes.set(...item);\n allAxes.set(...item);\n }\n const blockAxesArr = Array.from(blockAxes.values());\n // all compatible with block columns but without label columns\n let compatibleWithoutLabels = (columns.getUniversalEntries((spec) => predicate(spec) && spec.axesSpec.some((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return blockAxesArr.some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? []).filter((column) => !isLabelColumn(column.spec));\n // extend axes set for label columns request\n for (const c of compatibleWithoutLabels) {\n for (const spec of getNormalizedAxesList(c.spec.axesSpec)) {\n const aid = getAxisId(spec);\n allAxes.set(canonicalizeJson(aid), spec);\n }\n }\n const allAxesArr = Array.from(allAxes.values());\n // extend allowed columns - add columns thad doesn't have axes from block, but have all axes in 'allAxes' list (that means all axes from linkers or from 'hanging' of other selected columns)\n compatibleWithoutLabels = (columns.getUniversalEntries((spec) => predicate(spec) && spec.axesSpec.every((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return allAxesArr.some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? []).filter((column) => !isLabelColumn(column.spec));\n // label columns must be compatible with full set of axes - block axes and axes from compatible columns from result pool\n const compatibleLabels = (columns.getUniversalEntries((spec) => predicate(spec) && spec.axesSpec.some((axisSpec) => {\n const axisId = getAxisId(axisSpec);\n return allAxesArr.some((selectorAxisSpec) => matchAxisId(getAxisId(selectorAxisSpec), axisId));\n }), { dontWaitAllData: true, overrideLabelAnnotation: false }) ?? 
[]).filter((column) => isLabelColumn(column.spec));\n const compatible = [...compatibleWithoutLabels, ...compatibleLabels];\n // additional columns are duplicates with extra fields in domains for compatibility if there are ones with partial match\n const extendedColumns = enrichCompatible(blockAxes, compatible);\n return extendedColumns;\n }\n\n /** Create id for column copy with added keys in axes domains */\n const colId = (id, domains) => {\n let wid = id.toString();\n domains?.forEach((domain) => {\n if (domain) {\n for (const [k, v] of Object.entries(domain)) {\n wid += k;\n wid += v;\n }\n }\n });\n return wid;\n };\n /** All combinations with 1 key from each list */\n function getKeysCombinations(idsLists) {\n if (!idsLists.length) {\n return [];\n }\n let result = [[]];\n idsLists.forEach((list) => {\n const nextResult = [];\n list.forEach((key) => {\n nextResult.push(...result.map((resultItem) => [...resultItem, key]));\n });\n result = nextResult;\n });\n return result;\n }\n function isHiddenFromGraphColumn(column) {\n return !!readAnnotationJson(column, Annotation.HideDataFromGraphs);\n }\n function isHiddenFromUIColumn(column) {\n return !!readAnnotationJson(column, Annotation.HideDataFromUi);\n }\n function getAvailableWithLinkersAxes(linkerColumns, blockAxes) {\n const linkerMap = LinkerMap.fromColumns(linkerColumns.map(getColumnIdAndSpec));\n const availableAxes = linkerMap.getReachableByLinkersAxesFromAxesNormalized([...blockAxes.values()], (linkerKeyId, sourceAxisId) => matchAxisId(sourceAxisId, linkerKeyId));\n return new Map(availableAxes.map((axisSpec) => {\n const id = getAxisId(axisSpec);\n return [canonicalizeJson(id), axisSpec];\n }));\n }\n /** Add columns with fully compatible axes created from partial compatible ones */\n function enrichCompatible(blockAxes, columns) {\n return columns.flatMap((column) => getAdditionalColumnsForColumn(blockAxes, column));\n }\n function getAdditionalColumnsForColumn(blockAxes, column) {\n const columnAxesIds = column.spec.axesSpec.map(getAxisId);\n if (columnAxesIds.every((id) => blockAxes.has(canonicalizeJson(id)))) {\n return [column]; // the column is compatible with its own domains without modifications\n }\n // options with different possible domains for every axis of secondary column\n const secondaryIdsOptions = columnAxesIds.map((id) => {\n const result = [];\n for (const [_, mainId] of blockAxes) {\n if (matchAxisId(mainId, id) && !matchAxisId(id, mainId)) {\n result.push(mainId);\n }\n }\n return result;\n });\n // all possible combinations of axes with added domains\n const secondaryIdsVariants = getKeysCombinations(secondaryIdsOptions);\n // sets of added to column domain fields\n const allAddedDomainValues = new Set();\n const addedNotToAllVariantsDomainValues = new Set();\n const addedByVariantsDomainValues = secondaryIdsVariants.map((idsList) => {\n const addedSet = new Set();\n idsList.map((axisId, idx) => {\n const d1 = column.spec.axesSpec[idx].domain;\n const d2 = axisId.domain;\n Object.entries(d2 ?? 
{}).forEach(([key, value]) => {\n if (d1?.[key] === undefined) {\n const item = JSON.stringify([key, value]);\n addedSet.add(item);\n allAddedDomainValues.add(item);\n }\n });\n return ({\n ...axisId,\n annotations: column.spec.axesSpec[idx].annotations,\n });\n });\n return addedSet;\n });\n [...allAddedDomainValues].forEach((addedPart) => {\n if (addedByVariantsDomainValues.some((s) => !s.has(addedPart))) {\n addedNotToAllVariantsDomainValues.add(addedPart);\n }\n });\n const additionalColumns = secondaryIdsVariants.map((idsList, idx) => {\n const id = colId(column.id, idsList.map((id) => id.domain));\n const label = readAnnotation(column.spec, Annotation.Label) ?? '';\n const labelDomainPart = ([...addedByVariantsDomainValues[idx]])\n .filter((str) => addedNotToAllVariantsDomainValues.has(str))\n .sort()\n .map((v) => JSON.parse(v)?.[1]) // use in labels only domain values, but sort them by key to save the same order in all column variants\n .join(' / ');\n const annotations = {\n ...column.spec.annotations,\n [Annotation.Graph.IsVirtual]: stringifyJson(true),\n };\n if (label || labelDomainPart) {\n annotations[Annotation.Label] = label && labelDomainPart ? label + ' / ' + labelDomainPart : label + labelDomainPart;\n }\n return {\n ...column,\n id: id,\n spec: {\n ...column.spec,\n axesSpec: idsList.map((axisId, idx) => ({\n ...axisId,\n annotations: column.spec.axesSpec[idx].annotations,\n })),\n annotations,\n },\n };\n });\n return [column, ...additionalColumns];\n }\n /**\n The aim of createPFrameForGraphs: to create pframe with block’s columns and all compatible columns from result pool\n (including linker columns and all label columns).\n Block’s columns are added to pframe as is.\n Other columns are added basing on set of axes of block’s columns, considering available with linker columns.\n Compatible columns must have at least one axis from block’s axes set. This axis of the compatible column from\n result pool must satisfy matchAxisId (it can have less domain keys than in block’s axis, but without conflicting values\n among existing ones).\n In requests to pframe (calculateTableData) columns must have strictly the same axes. 
For compatibility in case\n of partially matched axis we add to pframe a copy of this column with modified axis (with filled missed domains)\n and modified label (with added domain values in case if more than one copy with different domains exist).\n */\n function createPFrameForGraphs(ctx, blockColumns) {\n const suitableSpec = (spec) => !isHiddenFromUIColumn(spec) && !isHiddenFromGraphColumn(spec);\n // if current block doesn't produce own columns then use all columns from result pool\n if (!blockColumns) {\n return ctx.createPFrame(getAllRelatedColumns(ctx, suitableSpec));\n }\n return ctx.createPFrame(getRelatedColumns(ctx, { columns: blockColumns, predicate: suitableSpec }));\n }\n\n var stringify = {exports: {}};\n\n var hasRequiredStringify;\n\n function requireStringify () {\n \tif (hasRequiredStringify) return stringify.exports;\n \thasRequiredStringify = 1;\n \t(function (module, exports$1) {\n \t\texports$1 = module.exports = stringify;\n \t\texports$1.getSerialize = serializer;\n\n \t\tfunction stringify(obj, replacer, spaces, cycleReplacer) {\n \t\t return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces)\n \t\t}\n\n \t\tfunction serializer(replacer, cycleReplacer) {\n \t\t var stack = [], keys = [];\n\n \t\t if (cycleReplacer == null) cycleReplacer = function(key, value) {\n \t\t if (stack[0] === value) return \"[Circular ~]\"\n \t\t return \"[Circular ~.\" + keys.slice(0, stack.indexOf(value)).join(\".\") + \"]\"\n \t\t };\n\n \t\t return function(key, value) {\n \t\t if (stack.length > 0) {\n \t\t var thisPos = stack.indexOf(this);\n \t\t ~thisPos ? stack.splice(thisPos + 1) : stack.push(this);\n \t\t ~thisPos ? keys.splice(thisPos, Infinity, key) : keys.push(key);\n \t\t if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value);\n \t\t }\n \t\t else stack.push(value);\n\n \t\t return replacer == null ? value : replacer.call(this, key, value)\n \t\t }\n \t\t} \n \t} (stringify, stringify.exports));\n \treturn stringify.exports;\n }\n\n requireStringify();\n\n // We want to define StandardErrorLike and PlErrorLike, it's a way to define recursive types in zod.\n // https://zod.dev/?id=recursive-types\n // We need zod to parse error strings into these objects for keeping new UI and old blocks compatible.\n const BasePlErrorLike = z.object({\n type: z.literal('PlError'),\n name: z.string(),\n message: z.string(),\n /** The message with all details needed for SDK developers. 
*/\n fullMessage: z.string().optional(),\n stack: z.string().optional(),\n });\n const PlErrorLike = BasePlErrorLike.extend({\n cause: z.lazy(() => ErrorLike).optional(),\n errors: z.lazy(() => ErrorLike.array()).optional(),\n });\n const BaseStandardErrorLike = z.object({\n type: z.literal('StandardError'),\n name: z.string(),\n message: z.string(),\n stack: z.string().optional(),\n });\n const StandardErrorLike = BaseStandardErrorLike.extend({\n cause: z.lazy(() => ErrorLike).optional(),\n errors: z.lazy(() => ErrorLike.array()).optional(),\n });\n const ErrorLike = z.union([StandardErrorLike, PlErrorLike]);\n // We want to define ErrorShape schema just to parse it above, it's a way to define recursive types in zod.\n // https://zod.dev/?id=recursive-types\n const baseErrorShape = z.object({\n name: z.string(),\n message: z.string(),\n fullMessage: z.string().optional(),\n stack: z.string().optional(),\n });\n const ErrorShape = baseErrorShape.extend({\n cause: z.lazy(() => ErrorShape).optional(),\n errors: z.lazy(() => ErrorShape.array()).optional(),\n });\n\n function getDefaultBlockLabel(data) {\n const parts = [];\n // Add dataset name\n if (data.datasetLabel) {\n parts.push(data.datasetLabel);\n }\n // Add allele/gene\n parts.push(data.allele ? 'Allele' : 'Gene');\n // Add chain info for single-cell datasets\n if (data.isSingleCell && data.chainLabel) {\n parts.push(data.chainLabel);\n }\n return parts.join(' - ');\n }\n\n const model = BlockModel.create()\n .withArgs({\n defaultBlockLabel: getDefaultBlockLabel({\n allele: false,\n isSingleCell: false,\n }),\n customBlockLabel: '',\n scChain: 'A',\n allele: false,\n })\n .withUiState({\n weightedFlag: true,\n vUsagePlotState: {\n title: 'V Usage',\n template: 'heatmapClustered',\n currentTab: 'settings',\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null,\n },\n },\n },\n jUsagePlotState: {\n title: 'J Usage',\n template: 'heatmapClustered',\n currentTab: null,\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null,\n },\n },\n },\n vjUsagePlotState: {\n title: 'V/J Usage',\n template: 'heatmapClustered',\n currentTab: null,\n layersSettings: {\n heatmapClustered: {\n normalizationDirection: null,\n },\n },\n },\n })\n .argsValid((ctx) => ctx.args.datasetRef !== undefined)\n .output('datasetOptions', (ctx) => ctx.resultPool.getOptions([{\n axes: [\n { name: 'pl7.app/sampleId' },\n { name: 'pl7.app/vdj/clonotypeKey' },\n ],\n annotations: { 'pl7.app/isAnchor': 'true' },\n }, {\n axes: [\n { name: 'pl7.app/sampleId' },\n { name: 'pl7.app/vdj/scClonotypeKey' },\n ],\n annotations: { 'pl7.app/isAnchor': 'true' },\n }], {\n // suppress native label of the column (e.g. 
\"Number of Reads\") to show only the dataset label\n label: { includeNativeLabel: false },\n }))\n .output('datasetSpec', (ctx) => {\n if (ctx.args.datasetRef === undefined) {\n return undefined;\n }\n return ctx.resultPool.getPColumnSpecByRef(ctx.args.datasetRef);\n })\n .outputWithStatus('pf', (ctx) => {\n const pCols = ctx.outputs?.resolve('pf')?.getPColumns();\n if (pCols === undefined) {\n return undefined;\n }\n return createPFrameForGraphs(ctx, pCols);\n })\n .output('isRunning', (ctx) => ctx.outputs?.getIsReadyOrError() === false)\n .title(() => 'V/J Usage')\n .subtitle((ctx) => ctx.args.customBlockLabel || ctx.args.defaultBlockLabel)\n .sections((_) => [\n { type: 'link', href: '/', label: 'V Gene Usage' },\n { type: 'link', href: '/jUsage', label: 'J Gene Usage' },\n { type: 'link', href: '/vjUsage', label: 'V/J Gene Usage' },\n ])\n .done(2);\n\n exports.getDefaultBlockLabel = getDefaultBlockLabel;\n exports.model = model;\n\n}));\n//# sourceMappingURL=bundle.js.map\n"}}
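The bundled `getAdditionalColumnsForColumn` logic above enumerates every way to fill in missing axis-domain keys by combining candidate axis ids, one pick per axis. For readers who do not want to parse the minified bundle, the sketch below restates that combination step in plain TypeScript. It is an illustrative reconstruction of the logic visible in `dist/bundle.js`, not the shipped source, and the generic type parameter is an assumption.

```ts
// Illustrative reconstruction (assumption: not the shipped source) of the
// combination helper in the bundled code above. Given one list of candidate
// axis ids per axis, it returns every tuple that picks exactly one candidate
// from each list (a Cartesian product built left to right).
function getKeysCombinations<T>(idsLists: T[][]): T[][] {
  if (!idsLists.length) return [];
  let result: T[][] = [[]];
  for (const list of idsLists) {
    const next: T[][] = [];
    for (const key of list) {
      // extend every partial combination with the current candidate
      next.push(...result.map((partial) => [...partial, key]));
    }
    // note: an empty candidate list collapses the result to []
    result = next;
  }
  return result;
}

// Example: two candidates for the first axis, one for the second.
// Produces [['a1', 'b1'], ['a2', 'b1']].
console.log(getKeysCombinations([['a1', 'a2'], ['b1']]));
```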
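The bundle also exports `getDefaultBlockLabel`, which assembles the block's default subtitle from the dataset label, an 'Allele'/'Gene' marker, and, for single-cell data, the chain label, joined with ' - '. The sketch below restates the helper and shows two calls; the `BlockLabelData` interface name and the dataset/chain values are hypothetical, while the field names and output format follow the bundled code above.

```ts
// Illustrative reconstruction (assumption: not the shipped source) of the
// getDefaultBlockLabel helper exported by the bundle above.
interface BlockLabelData {
  datasetLabel?: string;
  allele: boolean;
  isSingleCell: boolean;
  chainLabel?: string;
}

function getDefaultBlockLabel(data: BlockLabelData): string {
  const parts: string[] = [];
  if (data.datasetLabel) parts.push(data.datasetLabel); // dataset name first
  parts.push(data.allele ? 'Allele' : 'Gene'); // gene- vs. allele-level usage
  if (data.isSingleCell && data.chainLabel) parts.push(data.chainLabel); // chain only for single-cell data
  return parts.join(' - ');
}

// Matches the defaultBlockLabel baked into the model's initial args.
console.log(getDefaultBlockLabel({ allele: false, isSingleCell: false }));
// -> 'Gene'

// Hypothetical single-cell dataset at allele resolution.
console.log(getDefaultBlockLabel({
  datasetLabel: 'Donor A repertoire', // hypothetical dataset name
  allele: true,
  isSingleCell: true,
  chainLabel: 'IGH', // hypothetical chain label
}));
// -> 'Donor A repertoire - Allele - IGH'
```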