@weavelogic/knowledge-graph-agent 0.7.3 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/README.md +4 -0
  2. package/dist/_virtual/__vite-browser-external.js +2 -2
  3. package/dist/_virtual/__vite-browser-external.js.map +1 -1
  4. package/dist/_virtual/browser.js +2 -3
  5. package/dist/_virtual/browser.js.map +1 -1
  6. package/dist/cli/commands/hive-mind/add-frontmatter.js +2 -2
  7. package/dist/cli/commands/hive-mind/add-frontmatter.js.map +1 -1
  8. package/dist/cli/commands/hive-mind/analyze-links.js +2 -2
  9. package/dist/cli/commands/hive-mind/analyze-links.js.map +1 -1
  10. package/dist/cli/commands/hive-mind/find-connections.js +2 -2
  11. package/dist/cli/commands/hive-mind/find-connections.js.map +1 -1
  12. package/dist/cli/commands/hive-mind/validate-names.js +2 -2
  13. package/dist/cli/commands/hive-mind/validate-names.js.map +1 -1
  14. package/dist/graphql/server.js +2 -2
  15. package/dist/graphql/server.js.map +1 -1
  16. package/dist/mcp-server/tools/audit/index.d.ts +4 -0
  17. package/dist/mcp-server/tools/audit/index.d.ts.map +1 -1
  18. package/dist/node_modules/@typescript-eslint/project-service/dist/index.js +1 -1
  19. package/dist/node_modules/@typescript-eslint/types/dist/index.js +1 -1
  20. package/dist/node_modules/@typescript-eslint/visitor-keys/dist/index.js +1 -1
  21. package/dist/node_modules/debug/src/browser.js +1 -1
  22. package/dist/node_modules/fdir/dist/index.js +13 -13
  23. package/dist/node_modules/fdir/dist/index.js.map +1 -1
  24. package/dist/node_modules/tinyglobby/dist/index.js +14 -14
  25. package/dist/node_modules/tinyglobby/dist/index.js.map +1 -1
  26. package/dist/node_modules/typescript/lib/typescript.js +24 -24
  27. package/dist/node_modules/typescript/lib/typescript.js.map +1 -1
  28. package/dist/vector/services/embedding-service.js +1 -7
  29. package/dist/vector/services/embedding-service.js.map +1 -1
  30. package/package.json +3 -2
  31. package/dist/_virtual/browser2.js +0 -5
  32. package/dist/_virtual/browser2.js.map +0 -1
  33. package/dist/_virtual/index12.js +0 -7
  34. package/dist/_virtual/index12.js.map +0 -1
  35. package/dist/_virtual/ort-web.min.js +0 -8
  36. package/dist/_virtual/ort-web.min.js.map +0 -1
  37. package/dist/_virtual/ort-web.min2.js +0 -5
  38. package/dist/_virtual/ort-web.min2.js.map +0 -1
  39. package/dist/node_modules/@huggingface/jinja/dist/index.js +0 -118
  40. package/dist/node_modules/@huggingface/jinja/dist/index.js.map +0 -1
  41. package/dist/node_modules/@xenova/transformers/src/backends/onnx.js +0 -24
  42. package/dist/node_modules/@xenova/transformers/src/backends/onnx.js.map +0 -1
  43. package/dist/node_modules/@xenova/transformers/src/configs.js +0 -52
  44. package/dist/node_modules/@xenova/transformers/src/configs.js.map +0 -1
  45. package/dist/node_modules/@xenova/transformers/src/env.js +0 -35
  46. package/dist/node_modules/@xenova/transformers/src/env.js.map +0 -1
  47. package/dist/node_modules/@xenova/transformers/src/models.js +0 -3852
  48. package/dist/node_modules/@xenova/transformers/src/models.js.map +0 -1
  49. package/dist/node_modules/@xenova/transformers/src/tokenizers.js +0 -144
  50. package/dist/node_modules/@xenova/transformers/src/tokenizers.js.map +0 -1
  51. package/dist/node_modules/@xenova/transformers/src/utils/core.js +0 -52
  52. package/dist/node_modules/@xenova/transformers/src/utils/core.js.map +0 -1
  53. package/dist/node_modules/@xenova/transformers/src/utils/generation.js +0 -623
  54. package/dist/node_modules/@xenova/transformers/src/utils/generation.js.map +0 -1
  55. package/dist/node_modules/@xenova/transformers/src/utils/hub.js +0 -395
  56. package/dist/node_modules/@xenova/transformers/src/utils/hub.js.map +0 -1
  57. package/dist/node_modules/@xenova/transformers/src/utils/image.js +0 -12
  58. package/dist/node_modules/@xenova/transformers/src/utils/image.js.map +0 -1
  59. package/dist/node_modules/@xenova/transformers/src/utils/maths.js +0 -89
  60. package/dist/node_modules/@xenova/transformers/src/utils/maths.js.map +0 -1
  61. package/dist/node_modules/@xenova/transformers/src/utils/tensor.js +0 -750
  62. package/dist/node_modules/@xenova/transformers/src/utils/tensor.js.map +0 -1
  63. package/dist/node_modules/onnxruntime-common/dist/lib/backend-impl.js +0 -67
  64. package/dist/node_modules/onnxruntime-common/dist/lib/backend-impl.js.map +0 -1
  65. package/dist/node_modules/onnxruntime-common/dist/lib/env-impl.js +0 -24
  66. package/dist/node_modules/onnxruntime-common/dist/lib/env-impl.js.map +0 -1
  67. package/dist/node_modules/onnxruntime-common/dist/lib/env.js +0 -6
  68. package/dist/node_modules/onnxruntime-common/dist/lib/env.js.map +0 -1
  69. package/dist/node_modules/onnxruntime-common/dist/lib/index.js +0 -11
  70. package/dist/node_modules/onnxruntime-common/dist/lib/index.js.map +0 -1
  71. package/dist/node_modules/onnxruntime-common/dist/lib/inference-session-impl.js +0 -162
  72. package/dist/node_modules/onnxruntime-common/dist/lib/inference-session-impl.js.map +0 -1
  73. package/dist/node_modules/onnxruntime-common/dist/lib/inference-session.js +0 -6
  74. package/dist/node_modules/onnxruntime-common/dist/lib/inference-session.js.map +0 -1
  75. package/dist/node_modules/onnxruntime-common/dist/lib/tensor-impl.js +0 -393
  76. package/dist/node_modules/onnxruntime-common/dist/lib/tensor-impl.js.map +0 -1
  77. package/dist/node_modules/onnxruntime-common/dist/lib/tensor.js +0 -6
  78. package/dist/node_modules/onnxruntime-common/dist/lib/tensor.js.map +0 -1
  79. package/dist/node_modules/onnxruntime-web/dist/ort-web.min.js +0 -12919
  80. package/dist/node_modules/onnxruntime-web/dist/ort-web.min.js.map +0 -1
  81. package/dist/node_modules/ws/browser.js +0 -16
  82. package/dist/node_modules/ws/browser.js.map +0 -1
package/dist/node_modules/@xenova/transformers/src/utils/tensor.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"tensor.js","sources":["../../../../../../node_modules/@xenova/transformers/src/utils/tensor.js"],"sourcesContent":["/**\n * @file Helper module for `Tensor` processing.\n * \n * These functions and classes are only used internally, \n * meaning an end-user shouldn't need to access anything here.\n * \n * @module utils/tensor\n */\n\nimport { ONNX } from '../backends/onnx.js';\n\nimport {\n interpolate_data,\n permute_data\n} from './maths.js';\n\n\nconst DataTypeMap = Object.freeze({\n float32: Float32Array,\n float64: Float64Array,\n string: Array, // string[]\n int8: Int8Array,\n uint8: Uint8Array,\n int16: Int16Array,\n uint16: Uint16Array,\n int32: Int32Array,\n uint32: Uint32Array,\n int64: BigInt64Array,\n uint64: BigUint64Array,\n bool: Uint8Array,\n});\n\n/**\n * @typedef {keyof typeof DataTypeMap} DataType\n * @typedef {import('./maths.js').AnyTypedArray | any[]} DataArray\n */\n\nconst ONNXTensor = ONNX.Tensor;\n\nexport class Tensor {\n /** @type {number[]} Dimensions of the tensor. */\n dims;\n\n /** @type {DataType} Type of the tensor. */\n type;\n\n /** @type {DataArray} The data stored in the tensor. */\n data;\n\n /** @type {number} The number of elements in the tensor. */\n size;\n\n /**\n * Create a new Tensor or copy an existing Tensor.\n * @param {[DataType, DataArray, number[]]|[import('onnxruntime-common').Tensor]} args\n */\n constructor(...args) {\n if (args[0] instanceof ONNXTensor) {\n // Create shallow copy\n Object.assign(this, args[0]);\n\n } else {\n // Create new tensor\n Object.assign(this, new ONNXTensor(\n /** @type {DataType} */(args[0]),\n /** @type {Exclude<import('./maths.js').AnyTypedArray, Uint8ClampedArray>} */(args[1]),\n args[2]\n ));\n }\n\n return new Proxy(this, {\n get: (obj, key) => {\n if (typeof key === 'string') {\n let index = Number(key);\n if (Number.isInteger(index)) {\n // key is an integer (i.e., index)\n return obj._getitem(index);\n }\n }\n // @ts-ignore\n return obj[key];\n },\n set: (obj, key, value) => {\n // TODO allow setting of data\n\n // @ts-ignore\n return obj[key] = value;\n }\n });\n }\n\n /**\n * Returns an iterator object for iterating over the tensor data in row-major order.\n * If the tensor has more than one dimension, the iterator will yield subarrays.\n * @returns {Iterator} An iterator object for iterating over the tensor data in row-major order.\n */\n *[Symbol.iterator]() {\n const [iterLength, ...iterDims] = this.dims;\n\n if (iterDims.length > 0) {\n const iterSize = iterDims.reduce((a, b) => a * b);\n for (let i = 0; i < iterLength; ++i) {\n yield this._subarray(i, iterSize, iterDims);\n }\n } else {\n yield* this.data\n }\n\n }\n\n /**\n * Index into a Tensor object.\n * @param {number} index The index to access.\n * @returns {Tensor} The data at the specified index.\n */\n _getitem(index) {\n const [iterLength, ...iterDims] = this.dims;\n\n index = safeIndex(index, iterLength);\n\n if (iterDims.length > 0) {\n const iterSize = iterDims.reduce((a, b) => a * b);\n return this._subarray(index, iterSize, iterDims);\n } else {\n return new Tensor(this.type, [this.data[index]], iterDims);\n }\n }\n\n /**\n * @param {number|bigint} item The item to search for in the tensor\n * @returns {number} The index of the first occurrence of item in the tensor data.\n */\n indexOf(item) {\n for (let index = 0; index < this.data.length; ++index) {\n // Note: == instead of === so we can match Ints with BigInts\n if (this.data[index] == item) {\n return index;\n }\n }\n return -1;\n }\n\n /**\n * @param 
{number} index \n * @param {number} iterSize \n * @param {any} iterDims \n * @returns {Tensor}\n */\n _subarray(index, iterSize, iterDims) {\n const o1 = index * iterSize;\n const o2 = (index + 1) * iterSize;\n\n // We use subarray if available (typed array), otherwise we use slice (normal array)\n const data =\n ('subarray' in this.data)\n ? this.data.subarray(o1, o2)\n : this.data.slice(o1, o2);\n return new Tensor(this.type, data, iterDims);\n }\n\n /**\n * Returns the value of this tensor as a standard JavaScript Number. This only works\n * for tensors with one element. For other cases, see `Tensor.tolist()`.\n * @returns {number|bigint} The value of this tensor as a standard JavaScript Number.\n * @throws {Error} If the tensor has more than one element.\n */\n item() {\n if (this.data.length !== 1) {\n throw new Error(`a Tensor with ${this.data.length} elements cannot be converted to Scalar`);\n }\n return this.data[0];\n }\n\n /**\n * Convert tensor data to a n-dimensional JS list\n * @returns {Array}\n */\n tolist() {\n return reshape(this.data, this.dims)\n }\n\n /**\n * Return a new Tensor with the sigmoid function applied to each element.\n * @returns {Tensor} The tensor with the sigmoid function applied.\n */\n sigmoid() {\n return this.clone().sigmoid_();\n }\n\n /**\n * Applies the sigmoid function to the tensor in place.\n * @returns {Tensor} Returns `this`.\n */\n sigmoid_() {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] = 1 / (1 + Math.exp(-this.data[i]));\n }\n return this;\n }\n\n /**\n * Return a new Tensor with every element multiplied by a constant.\n * @param {number} val The value to multiply by.\n * @returns {Tensor} The new tensor.\n */\n mul(val) {\n return this.clone().mul_(val);\n }\n\n /**\n * Multiply the tensor by a constant in place.\n * @param {number} val The value to multiply by.\n * @returns {Tensor} Returns `this`.\n */\n mul_(val) {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] *= val;\n }\n return this;\n }\n\n\n /**\n * Return a new Tensor with every element added by a constant.\n * @param {number} val The value to add by.\n * @returns {Tensor} The new tensor.\n */\n add(val) {\n return this.clone().add_(val);\n }\n\n /**\n * Add the tensor by a constant in place.\n * @param {number} val The value to add by.\n * @returns {Tensor} Returns `this`.\n */\n add_(val) {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] += val;\n }\n return this;\n }\n clone() {\n return new Tensor(this.type, this.data.slice(), this.dims.slice());\n }\n\n slice(...slices) {\n // This allows for slicing with ranges and numbers\n let newTensorDims = [];\n let newOffsets = [];\n\n // slices is an array of numbers or arrays of numbers\n // e.g., slices = [0, [1, 3], null, [0, 3]]\n for (let sliceIndex = 0; sliceIndex < this.dims.length; ++sliceIndex) {\n let slice = slices[sliceIndex];\n\n if (slice === null || slice === undefined) {\n // null or undefined means take the whole dimension\n newOffsets.push([0, this.dims[sliceIndex]]);\n newTensorDims.push(this.dims[sliceIndex]);\n\n } else if (typeof slice === 'number') {\n slice = safeIndex(slice, this.dims[sliceIndex], sliceIndex);\n\n // A number means take a single element\n newOffsets.push([slice, slice + 1]);\n\n } else if (Array.isArray(slice) && slice.length === 2) {\n // An array of length 2 means take a range of elements\n\n if (slice[0] > slice[1]) {\n throw new Error(`Invalid slice: ${slice}`);\n }\n\n let offsets = [\n Math.max(slice[0], 0),\n Math.min(slice[1], 
this.dims[sliceIndex])\n ];\n\n newOffsets.push(offsets);\n newTensorDims.push(offsets[1] - offsets[0]);\n\n } else {\n throw new Error(`Invalid slice: ${slice}`);\n }\n }\n\n let newDims = newOffsets.map(([start, end]) => end - start);\n let newBufferSize = newDims.reduce((a, b) => a * b);\n\n // Allocate memory\n // @ts-ignore\n let data = new this.data.constructor(newBufferSize);\n\n // Precompute strides\n const stride = this.stride();\n\n for (let i = 0; i < newBufferSize; ++i) {\n let originalIndex = 0;\n for (let j = newDims.length - 1, num = i; j >= 0; --j) {\n const size = newDims[j];\n originalIndex += ((num % size) + newOffsets[j][0]) * stride[j];\n num = Math.floor(num / size);\n }\n data[i] = this.data[originalIndex];\n }\n return new Tensor(this.type, data, newTensorDims);\n\n }\n\n /**\n * Return a permuted version of this Tensor, according to the provided dimensions.\n * @param {...number} dims Dimensions to permute.\n * @returns {Tensor} The permuted tensor.\n */\n permute(...dims) {\n return permute(this, dims);\n }\n\n // TODO: implement transpose. For now (backwards compatibility), it's just an alias for permute()\n transpose(...dims) {\n return this.permute(...dims);\n }\n\n // TODO add .max() and .min() methods\n\n /**\n * Returns the sum of each row of the input tensor in the given dimension dim.\n * \n * @param {number} [dim=null] The dimension or dimensions to reduce. If `null`, all dimensions are reduced.\n * @param {boolean} keepdim Whether the output tensor has `dim` retained or not.\n * @returns The summed tensor\n */\n sum(dim = null, keepdim = false) {\n return this.norm(1, dim, keepdim);\n }\n\n /**\n * Returns the matrix norm or vector norm of a given tensor.\n * @param {number|string} [p='fro'] The order of norm\n * @param {number} [dim=null] Specifies which dimension of the tensor to calculate the norm across.\n * If dim is None, the norm will be calculated across all dimensions of input.\n * @param {boolean} [keepdim=false] Whether the output tensors have dim retained or not.\n * @returns {Tensor} The norm of the tensor.\n */\n norm(p = 'fro', dim = null, keepdim = false) {\n if (p === 'fro') {\n // NOTE: Since we only support integer dims, Frobenius norm produces the same result as p=2.\n p = 2;\n } else if (typeof p === 'string') {\n throw Error(`Unsupported norm: ${p}`);\n }\n\n if (dim === null) {\n // @ts-ignore\n let val = this.data.reduce((a, b) => a + (b ** p), 0) ** (1 / p);\n return new Tensor(this.type, [val], []);\n }\n\n // Negative indexing\n dim = safeIndex(dim, this.dims.length);\n\n // Calculate the shape of the resulting array after summation\n const resultDims = this.dims.slice(); // Copy the original dimensions\n resultDims[dim] = 1; // Remove the specified axis\n\n // Create a new array to store the accumulated values\n // @ts-ignore\n const result = new this.data.constructor(this.data.length / this.dims[dim]);\n\n // Iterate over the data array\n for (let i = 0; i < this.data.length; ++i) {\n\n // Calculate the index in the resulting array\n let resultIndex = 0;\n\n for (let j = this.dims.length - 1, num = i, resultMultiplier = 1; j >= 0; --j) {\n const size = this.dims[j];\n if (j !== dim) {\n const index = num % size;\n resultIndex += index * resultMultiplier;\n resultMultiplier *= resultDims[j];\n }\n num = Math.floor(num / size);\n }\n\n // Accumulate the value at the current index\n result[resultIndex] += (this.data[i]) ** p;\n }\n\n if (p !== 1) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = result[i] ** (1 / 
p);\n }\n }\n\n if (!keepdim) {\n resultDims.splice(dim, 1);\n }\n\n return new Tensor(this.type, result, resultDims);\n }\n\n /**\n * Performs `L_p` normalization of inputs over specified dimension. Operates in place.\n * @param {number} [p=2] The exponent value in the norm formulation\n * @param {number} [dim=1] The dimension to reduce\n * @returns {Tensor} `this` for operation chaining.\n */\n normalize_(p = 2.0, dim = 1) {\n dim = safeIndex(dim, this.dims.length);\n\n const norm = this.norm(p, dim, true);\n\n for (let i = 0; i < this.data.length; ++i) {\n\n // Calculate the index in the resulting array\n let resultIndex = 0;\n\n for (let j = this.dims.length - 1, num = i, resultMultiplier = 1; j >= 0; --j) {\n const size = this.dims[j];\n if (j !== dim) {\n const index = num % size;\n resultIndex += index * resultMultiplier;\n resultMultiplier *= this.dims[j];\n }\n num = Math.floor(num / size);\n }\n\n // Divide by normalized value\n this.data[i] /= norm.data[resultIndex];\n }\n\n return this;\n }\n\n /**\n * Performs `L_p` normalization of inputs over specified dimension.\n * @param {number} [p=2] The exponent value in the norm formulation\n * @param {number} [dim=1] The dimension to reduce\n * @returns {Tensor} The normalized tensor.\n */\n normalize(p = 2.0, dim = 1) {\n return this.clone().normalize_(p, dim);\n }\n\n /**\n * Compute and return the stride of this tensor.\n * Stride is the jump necessary to go from one element to the next one in the specified dimension dim.\n * @returns {number[]} The stride of this tensor.\n */\n stride() {\n return dimsToStride(this.dims);\n }\n\n /**\n * Returns a tensor with all specified dimensions of input of size 1 removed.\n * \n * NOTE: The returned tensor shares the storage with the input tensor, so changing the contents of one will change the contents of the other.\n * If you would like a copy, use `tensor.clone()` before squeezing.\n * \n * @param {number} [dim=null] If given, the input will be squeezed only in the specified dimensions.\n * @returns The squeezed tensor\n */\n squeeze(dim = null) {\n return new Tensor(\n this.type,\n this.data,\n calc_squeeze_dims(this.dims, dim)\n )\n }\n\n /**\n * In-place version of @see {@link Tensor.squeeze}\n */\n squeeze_(dim = null) {\n this.dims = calc_squeeze_dims(this.dims, dim);\n return this;\n }\n\n /**\n * Returns a new tensor with a dimension of size one inserted at the specified position.\n * \n * NOTE: The returned tensor shares the same underlying data with this tensor.\n * \n * @param {number} dim The index at which to insert the singleton dimension\n * @returns The unsqueezed tensor\n */\n unsqueeze(dim = null) {\n return new Tensor(\n this.type,\n this.data,\n calc_unsqueeze_dims(this.dims, dim)\n );\n }\n\n /**\n * In-place version of @see {@link Tensor.unsqueeze}\n */\n unsqueeze_(dim = null) {\n this.dims = calc_unsqueeze_dims(this.dims, dim);\n return this;\n }\n\n /**\n * In-place version of @see {@link Tensor.flatten}\n */\n flatten_(start_dim = 0, end_dim = -1) {\n // TODO validate inputs\n end_dim = (end_dim + this.dims.length) % this.dims.length;\n\n let dimsToKeepBefore = this.dims.slice(0, start_dim);\n let dimsToFlatten = this.dims.slice(start_dim, end_dim + 1);\n let dimsToKeepAfter = this.dims.slice(end_dim + 1);\n\n this.dims = [...dimsToKeepBefore, dimsToFlatten.reduce((a, b) => a * b, 1), ...dimsToKeepAfter]\n return this;\n }\n\n /**\n * Flattens input by reshaping it into a one-dimensional tensor.\n * If `start_dim` or `end_dim` are passed, only dimensions starting 
with `start_dim`\n * and ending with `end_dim` are flattened. The order of elements in input is unchanged.\n * @param {number} start_dim the first dim to flatten\n * @param {number} end_dim the last dim to flatten\n * @returns The flattened tensor.\n */\n flatten(start_dim = 0, end_dim = -1) {\n return this.clone().flatten_(start_dim, end_dim);\n }\n\n /**\n * Returns a new tensor with the same data as the `self` tensor but of a different `shape`.\n * @param {...number} dims the desired size\n * @returns {Tensor} The tensor with the same data but different shape\n */\n view(...dims) {\n // TODO: validate dims\n let inferredIndex = -1;\n for (let i = 0; i < dims.length; ++i) {\n if (dims[i] === -1) {\n if (inferredIndex !== -1) {\n throw new Error(\"Only one dimension can be inferred\");\n }\n inferredIndex = i;\n }\n }\n\n if (inferredIndex !== -1) {\n // Some dimension must be inferred\n const productOther = dims.reduce((product, curr, index) => {\n return index !== inferredIndex ? product * curr : product\n }, 1);\n\n dims[inferredIndex] = this.data.length / productOther;\n }\n return new Tensor(this.type, this.data, dims); // NOTE: uses same underlying storage\n }\n\n neg_() {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] = -this.data[i];\n }\n return this;\n }\n neg() {\n return this.clone().neg_();\n }\n\n /**\n * In-place version of @see {@link Tensor.clamp}\n */\n clamp_(min, max) {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] = Math.min(Math.max(this.data[i], min), max);\n }\n return this;\n }\n\n /**\n * Clamps all elements in input into the range [ min, max ]\n * @param {number} min lower-bound of the range to be clamped to\n * @param {number} max upper-bound of the range to be clamped to\n * @returns the output tensor.\n */\n clamp(min, max) {\n return this.clone().clamp_(min, max);\n }\n\n /**\n * In-place version of @see {@link Tensor.round}\n */\n round_() {\n for (let i = 0; i < this.data.length; ++i) {\n this.data[i] = Math.round(this.data[i]);\n }\n return this;\n }\n\n /**\n * Rounds elements of input to the nearest integer.\n * @returns the output tensor.\n */\n round() {\n return this.clone().round_();\n }\n\n /**\n * Performs Tensor dtype conversion.\n * @param {DataType} type The desired data type.\n * @returns {Tensor} The converted tensor.\n */\n to(type) {\n // If the self Tensor already has the correct dtype, then self is returned.\n if (this.type === type) return this;\n\n // Otherwise, the returned tensor is a copy of self with the desired dtype.\n if (!DataTypeMap.hasOwnProperty(type)) {\n throw new Error(`Unsupported type: ${type}`);\n }\n // @ts-ignore\n return new Tensor(type, DataTypeMap[type].from(this.data), this.dims);\n }\n}\n\n/**\n * This creates a nested array of a given type and depth (see examples).\n * \n * @example\n * NestArray<string, 1>; // string[]\n * @example\n * NestArray<number, 2>; // number[][]\n * @example\n * NestArray<string, 3>; // string[][][] etc.\n * @template T\n * @template {number} Depth\n * @template {never[]} [Acc=[]]\n * @typedef {Acc['length'] extends Depth ? 
T : NestArray<T[], Depth, [...Acc, never]>} NestArray\n */\n\n/**\n * Reshapes a 1-dimensional array into an n-dimensional array, according to the provided dimensions.\n *\n * @example\n * reshape([10 ], [1 ]); // Type: number[] Value: [10]\n * reshape([1, 2, 3, 4 ], [2, 2 ]); // Type: number[][] Value: [[1, 2], [3, 4]]\n * reshape([1, 2, 3, 4, 5, 6, 7, 8], [2, 2, 2]); // Type: number[][][] Value: [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]\n * reshape([1, 2, 3, 4, 5, 6, 7, 8], [4, 2 ]); // Type: number[][] Value: [[1, 2], [3, 4], [5, 6], [7, 8]]\n * @param {T[]|DataArray} data The input array to reshape.\n * @param {DIM} dimensions The target shape/dimensions.\n * @template T\n * @template {[number]|number[]} DIM\n * @returns {NestArray<T, DIM[\"length\"]>} The reshaped array.\n */\nfunction reshape(data, dimensions) {\n\n const totalElements = data.length;\n const dimensionSize = dimensions.reduce((a, b) => a * b);\n\n if (totalElements !== dimensionSize) {\n throw Error(`cannot reshape array of size ${totalElements} into shape (${dimensions})`);\n }\n\n /** @type {any} */\n let reshapedArray = data;\n\n for (let i = dimensions.length - 1; i >= 0; i--) {\n reshapedArray = reshapedArray.reduce((acc, val) => {\n let lastArray = acc[acc.length - 1];\n\n if (lastArray.length < dimensions[i]) {\n lastArray.push(val);\n } else {\n acc.push([val]);\n }\n\n return acc;\n }, [[]]);\n }\n\n return reshapedArray[0];\n}\n\n/**\n * Permutes a tensor according to the provided axes.\n * @param {any} tensor The input tensor to permute.\n * @param {Array} axes The axes to permute the tensor along.\n * @returns {Tensor} The permuted tensor.\n */\nexport function permute(tensor, axes) {\n const [permutedData, shape] = permute_data(tensor.data, tensor.dims, axes);\n return new Tensor(tensor.type, permutedData, shape);\n}\n\n\n/**\n * Interpolates an Tensor to the given size.\n * @param {Tensor} input The input tensor to interpolate. Data must be channel-first (i.e., [c, h, w])\n * @param {number[]} size The output size of the image\n * @param {string} mode The interpolation mode\n * @param {boolean} align_corners Whether to align corners.\n * @returns {Tensor} The interpolated tensor.\n */\nexport function interpolate(input, [out_height, out_width], mode = 'bilinear', align_corners = false) {\n\n // Input image dimensions\n const in_channels = input.dims.at(-3) ?? 
1;\n const in_height = input.dims.at(-2);\n const in_width = input.dims.at(-1);\n\n let output = interpolate_data(\n /** @type {import('./maths.js').TypedArray}*/(input.data),\n [in_channels, in_height, in_width],\n [out_height, out_width],\n mode,\n align_corners\n );\n return new Tensor(input.type, output, [in_channels, out_height, out_width]);\n}\n\n/**\n * Perform mean pooling of the last hidden state followed by a normalization step.\n * @param {Tensor} last_hidden_state Tensor of shape [batchSize, seqLength, embedDim]\n * @param {Tensor} attention_mask Tensor of shape [batchSize, seqLength]\n * @returns {Tensor} Returns a new Tensor of shape [batchSize, embedDim].\n */\nexport function mean_pooling(last_hidden_state, attention_mask) {\n // last_hidden_state: [batchSize, seqLength, embedDim]\n // attention_mask: [batchSize, seqLength]\n\n let shape = [last_hidden_state.dims[0], last_hidden_state.dims[2]];\n // @ts-ignore\n let returnedData = new last_hidden_state.data.constructor(shape[0] * shape[1]);\n let [batchSize, seqLength, embedDim] = last_hidden_state.dims;\n\n let outIndex = 0;\n for (let i = 0; i < batchSize; ++i) {\n let offset = i * embedDim * seqLength;\n\n for (let k = 0; k < embedDim; ++k) {\n let sum = 0;\n let count = 0;\n\n let attnMaskOffset = i * seqLength;\n let offset2 = offset + k;\n // Pool over all words in sequence\n for (let j = 0; j < seqLength; ++j) {\n // index into attention mask\n let attn = Number(attention_mask.data[attnMaskOffset + j]);\n\n count += attn;\n sum += last_hidden_state.data[offset2 + j * embedDim] * attn;\n }\n\n let avg = sum / count;\n returnedData[outIndex++] = avg;\n }\n }\n\n return new Tensor(\n last_hidden_state.type,\n returnedData,\n shape\n )\n}\n\n/**\n * Apply Layer Normalization for last certain number of dimensions.\n * @param {Tensor} input The input tensor\n * @param {number[]} normalized_shape input shape from an expected input of size\n * @param {Object} options The options for the layer normalization\n * @param {number} [options.eps=1e-5] A value added to the denominator for numerical stability.\n * @returns {Tensor} The normalized tensor.\n */\nexport function layer_norm(input, normalized_shape, {\n eps = 1e-5,\n} = {}) {\n if (input.dims.length !== 2) {\n throw new Error('`layer_norm` currently only supports 2D input.');\n }\n\n const [batchSize, featureDim] = input.dims;\n\n if (normalized_shape.length !== 1 && normalized_shape[0] !== featureDim) {\n throw new Error('`normalized_shape` must be a 1D array with shape `[input.dims[1]]`.');\n }\n\n const [std, mean] = std_mean(input, 1, 0, true);\n\n // @ts-ignore\n const returnedData = new input.data.constructor(input.data.length);\n\n for (let i = 0; i < batchSize; ++i) {\n const offset = i * featureDim;\n for (let j = 0; j < featureDim; ++j) {\n const offset2 = offset + j;\n returnedData[offset2] = (input.data[offset2] - mean.data[i]) / (std.data[i] + eps);\n }\n }\n return new Tensor(input.type, returnedData, input.dims);\n}\n\n/**\n * Helper function to calculate new dimensions when performing a squeeze operation.\n * @param {number[]} dims The dimensions of the tensor.\n * @param {number|number[]|null} dim The dimension(s) to squeeze.\n * @returns The new dimensions.\n * @private\n */\nfunction calc_squeeze_dims(dims, dim) {\n dims = dims.slice();\n if (dim === null) {\n dims = dims.filter((d) => d !== 1);\n } else if (typeof dim === 'number') {\n if (dims[dim] === 1) {\n dims.splice(dim, 1);\n }\n } else if (Array.isArray(dim)) {\n dims = dims.filter((x, i) => 
{\n return x !== 1 || !dim.includes(i);\n });\n }\n return dims;\n}\n\n/**\n * Helper function to calculate new dimensions when performing an unsqueeze operation.\n * @param {number[]} dims The dimensions of the tensor.\n * @param {number} dim The dimension to unsqueeze.\n * @returns The new dimensions.\n * @private\n */\nfunction calc_unsqueeze_dims(dims, dim) {\n // Dimension out of range (e.g., \"expected to be in range of [-4, 3], but got 4\")\n // + 1 since we allow inserting at the end (i.e. dim = -1)\n dim = safeIndex(dim, dims.length + 1);\n dims = dims.slice();\n // Insert 1 into specified dimension\n dims.splice(dim, 0, 1);\n return dims;\n}\n\n/**\n * Safely calculate the index for an array of a given size, allowing negative indexing.\n * @param {number} index The index that will be used.\n * @param {number} size The size of the array.\n * @param {number} [dimension=null] The dimension that the index is for (optional).\n * @returns {number} The index, guaranteed to be non-negative and less than `arrayLength`.\n * \n * @throws {Error} If the index is out of range.\n * @private\n */\nfunction safeIndex(index, size, dimension = null) {\n if (index < -size || index >= size) {\n throw new Error(`IndexError: index ${index} is out of bounds for dimension${dimension === null ? '' : ' ' + dimension} with size ${size}`);\n }\n\n if (index < 0) {\n // Negative indexing, ensuring positive index\n index = ((index % size) + size) % size;\n }\n return index;\n}\n\n/**\n * Concatenates an array of tensors along a specified dimension.\n * @param {Tensor[]} tensors The array of tensors to concatenate.\n * @param {number} dim The dimension to concatenate along.\n * @returns {Tensor} The concatenated tensor.\n */\nexport function cat(tensors, dim = 0) {\n dim = safeIndex(dim, tensors[0].dims.length);\n\n // TODO do validation of shapes\n\n const resultDims = tensors[0].dims.slice();\n resultDims[dim] = tensors.reduce((a, b) => a + b.dims[dim], 0);\n\n // Create a new array to store the accumulated values\n const resultSize = resultDims.reduce((a, b) => a * b, 1);\n // @ts-ignore\n const result = new tensors[0].data.constructor(resultSize);\n\n // Create output tensor of same type as first\n const resultType = tensors[0].type;\n\n if (dim === 0) {\n // Handle special case for performance reasons\n\n let offset = 0;\n for (let t of tensors) {\n result.set(t.data, offset);\n offset += t.data.length;\n }\n\n } else {\n\n let currentDim = 0;\n\n for (let t = 0; t < tensors.length; ++t) {\n let tensor = tensors[t];\n\n // Iterate over the data array\n for (let i = 0; i < tensor.data.length; ++i) {\n // Calculate the index in the resulting array\n let resultIndex = 0;\n\n for (let j = tensor.dims.length - 1, num = i, resultMultiplier = 1; j >= 0; --j) {\n const size = tensor.dims[j];\n let index = num % size;\n if (j === dim) {\n index += currentDim;\n }\n resultIndex += index * resultMultiplier;\n resultMultiplier *= resultDims[j];\n num = Math.floor(num / size);\n }\n // Accumulate the value at the current index\n result[resultIndex] = tensor.data[i];\n }\n\n currentDim += tensor.dims[dim];\n }\n }\n return new Tensor(resultType, result, resultDims);\n}\n\n/**\n * Stack an array of tensors along a specified dimension.\n * @param {Tensor[]} tensors The array of tensors to stack.\n * @param {number} dim The dimension to stack along.\n * @returns {Tensor} The stacked tensor.\n */\nexport function stack(tensors, dim = 0) {\n // TODO do validation of shapes\n // NOTE: stack expects each tensor to be equal 
size\n return cat(tensors.map(t => t.unsqueeze(dim)), dim);\n}\n\n\n/**\n * Calculates the standard deviation and mean over the dimensions specified by dim. dim can be a single dimension or `null` to reduce over all dimensions.\n * @param {Tensor} input the input tenso\n * @param {number|null} dim the dimension to reduce. If None, all dimensions are reduced.\n * @param {number} correction difference between the sample size and sample degrees of freedom. Defaults to Bessel's correction, correction=1.\n * @param {boolean} keepdim whether the output tensor has dim retained or not.\n * @returns {Tensor[]} A tuple of (std, mean) tensors.\n */\nexport function std_mean(input, dim = null, correction = 1, keepdim = false) {\n\n if (dim === null) {\n // None to reduce over all dimensions.\n // @ts-ignore\n const sum = input.data.reduce((a, b) => a + b, 0);\n const mean = sum / input.data.length;\n // @ts-ignore\n const std = Math.sqrt(input.data.reduce((a, b) => a + (b - mean) ** 2, 0) / (input.data.length - correction));\n\n const meanTensor = new Tensor(input.type, [mean], [/* scalar */]);\n const stdTensor = new Tensor(input.type, [std], [/* scalar */]);\n\n return [stdTensor, meanTensor];\n }\n\n // Negative indexing\n dim = safeIndex(dim, input.dims.length);\n\n const meanTensor = mean(input, dim, keepdim);\n\n // Calculate the shape of the resulting array after summation\n const resultDims = input.dims.slice(); // Copy the original dimensions\n resultDims[dim] = 1; // Remove the specified axis\n\n // Create a new array to store the accumulated values\n // @ts-ignore\n const result = new input.data.constructor(input.data.length / input.dims[dim]);\n\n // Iterate over the data array\n for (let i = 0; i < input.data.length; ++i) {\n\n // Calculate the index in the resulting array\n let resultIndex = 0;\n\n for (let j = input.dims.length - 1, num = i, resultMultiplier = 1; j >= 0; --j) {\n const size = input.dims[j];\n if (j !== dim) {\n const index = num % size;\n resultIndex += index * resultMultiplier;\n resultMultiplier *= resultDims[j];\n }\n num = Math.floor(num / size);\n }\n\n // Accumulate the value at the current index\n result[resultIndex] += (input.data[i] - meanTensor.data[resultIndex]) ** 2;\n }\n\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.sqrt(result[i] / (input.dims[dim] - correction));\n }\n\n if (!keepdim) {\n resultDims.splice(dim, 1);\n }\n\n const stdTensor = new Tensor(input.type, result, resultDims);\n\n return [stdTensor, meanTensor];\n}\n\n\n/**\n * Returns the mean value of each row of the input tensor in the given dimension dim.\n * @param {Tensor} input the input tensor.\n * @param {number|null} dim the dimension to reduce.\n * @param {boolean} keepdim whether the output tensor has dim retained or not.\n * @returns A new tensor with means taken along the specified dimension.\n */\nexport function mean(input, dim = null, keepdim = false) {\n\n if (dim === null) {\n // None to reduce over all dimensions.\n // @ts-ignore\n let val = input.data.reduce((a, b) => a + b, 0);\n return new Tensor(input.type, [val / input.data.length], [/* scalar */]);\n }\n\n // Negative indexing\n dim = safeIndex(dim, input.dims.length);\n\n // Calculate the shape of the resulting array after summation\n const resultDims = input.dims.slice(); // Copy the original dimensions\n resultDims[dim] = 1; // Remove the specified axis\n\n // Create a new array to store the accumulated values\n // @ts-ignore\n const result = new input.data.constructor(input.data.length / 
input.dims[dim]);\n\n // Iterate over the data array\n for (let i = 0; i < input.data.length; ++i) {\n\n // Calculate the index in the resulting array\n let resultIndex = 0;\n\n for (let j = input.dims.length - 1, num = i, resultMultiplier = 1; j >= 0; --j) {\n const size = input.dims[j];\n if (j !== dim) {\n const index = num % size;\n resultIndex += index * resultMultiplier;\n resultMultiplier *= resultDims[j];\n }\n num = Math.floor(num / size);\n }\n\n // Accumulate the value at the current index\n result[resultIndex] += input.data[i];\n }\n\n if (input.dims[dim] !== 1) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = result[i] / input.dims[dim];\n }\n }\n\n if (!keepdim) {\n resultDims.splice(dim, 1);\n }\n\n return new Tensor(input.type, result, resultDims);\n}\n\n\n/**\n *\n * Measures similarity between two temporal sequences (e.g., input audio and output tokens\n * to generate token-level timestamps).\n * @param {Tensor} matrix \n * @returns {number[][]}\n */\nexport function dynamicTimeWarping(matrix) {\n const [output_length, input_length] = matrix.dims;\n\n const outputShape = [output_length + 1, input_length + 1];\n\n const cost = new Tensor(\n 'float32',\n new Float32Array(outputShape[0] * outputShape[1]).fill(Infinity),\n outputShape\n );\n\n const trace = new Tensor(\n 'float32',\n new Float32Array(outputShape[0] * outputShape[1]).fill(-1),\n outputShape\n )\n\n // same as `cost[0][0] = 0`;\n cost[0].data[0] = 0;\n\n for (let j = 1; j < input_length + 1; ++j) {\n for (let i = 1; i < output_length + 1; ++i) {\n\n const c0 = cost[i - 1][j - 1].item();\n const c1 = cost[i - 1][j].item();\n const c2 = cost[i][j - 1].item();\n\n let c, t;\n if (c0 < c1 && c0 < c2) {\n c = c0;\n t = 0;\n } else if (c1 < c0 && c1 < c2) {\n c = c1;\n t = 1;\n } else {\n c = c2;\n t = 2;\n }\n\n cost[i].data[j] = matrix[i - 1][j - 1].item() + c;\n trace[i].data[j] = t;\n }\n }\n\n // backtrace\n let i = output_length;\n let j = input_length;\n\n // @ts-ignore\n trace.data.fill(2, 0, outputShape[1]) // trace[0, :] = 2\n for (let i = 0; i < outputShape[0]; ++i) { // trace[:, 0] = 1\n trace[i].data[0] = 1;\n }\n\n let text_indices = [];\n let time_indices = [];\n\n while (i > 0 || j > 0) {\n text_indices.push(i - 1);\n time_indices.push(j - 1);\n\n const t = trace[i][j].item();\n switch (t) {\n case 0:\n --i; --j;\n break;\n case 1:\n --i;\n break;\n case 2:\n --j;\n break;\n default:\n throw new Error(\n `Internal error in dynamic time warping. Unexpected trace[${i}, ${j}]. 
Please file a bug report.`\n )\n }\n }\n\n text_indices.reverse();\n time_indices.reverse();\n\n return [text_indices, time_indices];\n\n}\n\nfunction dimsToStride(dims) {\n const stride = new Array(dims.length);\n for (let i = dims.length - 1, s2 = 1; i >= 0; --i) {\n stride[i] = s2;\n s2 *= dims[i];\n }\n return stride;\n}\n\n/**\n * Returns a tensor filled with the scalar value 1, with the shape defined by the variable argument size.\n * @param {number[]} size A sequence of integers defining the shape of the output tensor.\n */\nexport function ones(size) {\n const numElements = size.reduce((a, b) => a * b, 1);\n return new Tensor(\n 'int64',\n new BigInt64Array(numElements).fill(1n),\n size\n )\n}\n\n/**\n * Returns a tensor filled with the scalar value 1, with the same size as input.\n * @param {Tensor} tensor The size of input will determine size of the output tensor.\n * @returns The ones tensor.\n */\nexport function ones_like(tensor) {\n return ones(tensor.dims);\n}\n\n/**\n * Quantizes the embeddings tensor to binary or unsigned binary precision.\n * @param {Tensor} tensor The tensor to quantize.\n * @param {'binary'|'ubinary'} precision The precision to use for quantization.\n * @returns {Tensor} The quantized tensor.\n */\nexport function quantize_embeddings(tensor, precision) {\n if (tensor.dims.length !== 2) {\n throw new Error(\"The tensor must have 2 dimensions\");\n }\n if (tensor.dims.at(-1) % 8 !== 0) {\n throw new Error(\"The last dimension of the tensor must be a multiple of 8\");\n }\n if (!['binary', 'ubinary'].includes(precision)) {\n throw new Error(\"The precision must be either 'binary' or 'ubinary'\");\n }\n\n const signed = precision === 'binary';\n const dtype = signed ? 'int8' : 'uint8';\n\n // Create a typed array to store the packed bits\n const cls = signed ? Int8Array : Uint8Array;\n const inputData = tensor.data;\n const outputData = new cls(inputData.length / 8);\n\n // Iterate over each number in the array\n for (let i = 0; i < inputData.length; ++i) {\n // Determine if the number is greater than 0\n const bit = inputData[i] > 0 ? 
1 : 0;\n\n // Calculate the index in the typed array and the position within the byte\n const arrayIndex = Math.floor(i / 8);\n const bitPosition = i % 8;\n\n // Pack the bit into the typed array\n outputData[arrayIndex] |= bit << (7 - bitPosition);\n if (signed && bitPosition === 0) {\n outputData[arrayIndex] -= 128;\n }\n };\n\n return new Tensor(dtype, outputData, [tensor.dims[0], tensor.dims[1] / 8]);\n}\n"],"names":["mean","meanTensor","stdTensor","j","i"],"mappings":";;AAiBA,MAAM,cAAc,OAAO,OAAO;AAAA,EAC9B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA;AAAA,EACR,MAAM;AAAA,EACN,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AACV,CAAC;AAOD,MAAM,aAAa,KAAK;AAEjB,MAAM,OAAO;AAAA;AAAA,EAEhB;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eAAe,MAAM;AACjB,QAAI,KAAK,CAAC,aAAa,YAAY;AAE/B,aAAO,OAAO,MAAM,KAAK,CAAC,CAAC;AAAA,IAE/B,OAAO;AAEH,aAAO,OAAO,MAAM,IAAI;AAAA;AAAA,QACI,KAAK,CAAC;AAAA;AAAA,QACgD,KAAK,CAAC;AAAA,QACpF,KAAK,CAAC;AAAA,MACtB,CAAa;AAAA,IACL;AAEA,WAAO,IAAI,MAAM,MAAM;AAAA,MACnB,KAAK,CAAC,KAAK,QAAQ;AACf,YAAI,OAAO,QAAQ,UAAU;AACzB,cAAI,QAAQ,OAAO,GAAG;AACtB,cAAI,OAAO,UAAU,KAAK,GAAG;AAEzB,mBAAO,IAAI,SAAS,KAAK;AAAA,UAC7B;AAAA,QACJ;AAEA,eAAO,IAAI,GAAG;AAAA,MAClB;AAAA,MACA,KAAK,CAAC,KAAK,KAAK,UAAU;AAItB,eAAO,IAAI,GAAG,IAAI;AAAA,MACtB;AAAA,IACZ,CAAS;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,EAAE,OAAO,QAAQ,IAAI;AACjB,UAAM,CAAC,YAAY,GAAG,QAAQ,IAAI,KAAK;AAEvC,QAAI,SAAS,SAAS,GAAG;AACrB,YAAM,WAAW,SAAS,OAAO,CAAC,GAAG,MAAM,IAAI,CAAC;AAChD,eAAS,IAAI,GAAG,IAAI,YAAY,EAAE,GAAG;AACjC,cAAM,KAAK,UAAU,GAAG,UAAU,QAAQ;AAAA,MAC9C;AAAA,IACJ,OAAO;AACH,aAAO,KAAK;AAAA,IAChB;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,OAAO;AACZ,UAAM,CAAC,YAAY,GAAG,QAAQ,IAAI,KAAK;AAEvC,YAAQ,UAAU,OAAO,UAAU;AAEnC,QAAI,SAAS,SAAS,GAAG;AACrB,YAAM,WAAW,SAAS,OAAO,CAAC,GAAG,MAAM,IAAI,CAAC;AAChD,aAAO,KAAK,UAAU,OAAO,UAAU,QAAQ;AAAA,IACnD,OAAO;AACH,aAAO,IAAI,OAAO,KAAK,MAAM,CAAC,KAAK,KAAK,KAAK,CAAC,GAAG,QAAQ;AAAA,IAC7D;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAQ,MAAM;AACV,aAAS,QAAQ,GAAG,QAAQ,KAAK,KAAK,QAAQ,EAAE,OAAO;AAEnD,UAAI,KAAK,KAAK,KAAK,KAAK,MAAM;AAC1B,eAAO;AAAA,MACX;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAO,UAAU,UAAU;AACjC,UAAM,KAAK,QAAQ;AACnB,UAAM,MAAM,QAAQ,KAAK;AAGzB,UAAM,OACD,cAAc,KAAK,OACd,KAAK,KAAK,SAAS,IAAI,EAAE,IACzB,KAAK,KAAK,MAAM,IAAI,EAAE;AAChC,WAAO,IAAI,OAAO,KAAK,MAAM,MAAM,QAAQ;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO;AACH,QAAI,KAAK,KAAK,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,iBAAiB,KAAK,KAAK,MAAM,yCAAyC;AAAA,IAC9F;AACA,WAAO,KAAK,KAAK,CAAC;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS;AACL,WAAO,QAAQ,KAAK,MAAM,KAAK,IAAI;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAU;AACN,WAAO,KAAK,MAAK,EAAG,SAAQ;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,WAAW;AACP,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,KAAK,KAAK,CAAC,CAAC;AAAA,IAClD;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,KAAK;AACL,WAAO,KAAK,QAAQ,KAAK,GAAG;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,KAAK,KAAK;AACN,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,KAAK;AAAA,IACpB;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,KAAK;AACL,WAAO,KAAK,QAAQ,KAAK,GAAG;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,KAAK,KAAK;AACN,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,KAAK;AAAA,IACpB;AACA,WAAO;AAAA,EACX;AAAA,EACA,QAAQ;AACJ,WAAO,IAAI,OAAO,KAAK,MAAM,KAAK,KAAK,SAAS,KAAK,KAAK,MAAK,CAAE;AAAA,EACrE;AAAA,EAEA,SAAS,QAAQ;AAEb,QAAI,gBAAgB,CAAA;AACpB,QAAI
,aAAa,CAAA;AAIjB,aAAS,aAAa,GAAG,aAAa,KAAK,KAAK,QAAQ,EAAE,YAAY;AAClE,UAAI,QAAQ,OAAO,UAAU;AAE7B,UAAI,UAAU,QAAQ,UAAU,QAAW;AAEvC,mBAAW,KAAK,CAAC,GAAG,KAAK,KAAK,UAAU,CAAC,CAAC;AAC1C,sBAAc,KAAK,KAAK,KAAK,UAAU,CAAC;AAAA,MAE5C,WAAW,OAAO,UAAU,UAAU;AAClC,gBAAQ,UAAU,OAAO,KAAK,KAAK,UAAU,GAAG,UAAU;AAG1D,mBAAW,KAAK,CAAC,OAAO,QAAQ,CAAC,CAAC;AAAA,MAEtC,WAAW,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,GAAG;AAGnD,YAAI,MAAM,CAAC,IAAI,MAAM,CAAC,GAAG;AACrB,gBAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,QAC7C;AAEA,YAAI,UAAU;AAAA,UACV,KAAK,IAAI,MAAM,CAAC,GAAG,CAAC;AAAA,UACpB,KAAK,IAAI,MAAM,CAAC,GAAG,KAAK,KAAK,UAAU,CAAC;AAAA,QAC5D;AAEgB,mBAAW,KAAK,OAAO;AACvB,sBAAc,KAAK,QAAQ,CAAC,IAAI,QAAQ,CAAC,CAAC;AAAA,MAE9C,OAAO;AACH,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC7C;AAAA,IACJ;AAEA,QAAI,UAAU,WAAW,IAAI,CAAC,CAAC,OAAO,GAAG,MAAM,MAAM,KAAK;AAC1D,QAAI,gBAAgB,QAAQ,OAAO,CAAC,GAAG,MAAM,IAAI,CAAC;AAIlD,QAAI,OAAO,IAAI,KAAK,KAAK,YAAY,aAAa;AAGlD,UAAM,SAAS,KAAK,OAAM;AAE1B,aAAS,IAAI,GAAG,IAAI,eAAe,EAAE,GAAG;AACpC,UAAI,gBAAgB;AACpB,eAAS,IAAI,QAAQ,SAAS,GAAG,MAAM,GAAG,KAAK,GAAG,EAAE,GAAG;AACnD,cAAM,OAAO,QAAQ,CAAC;AACtB,0BAAmB,MAAM,OAAQ,WAAW,CAAC,EAAE,CAAC,KAAK,OAAO,CAAC;AAC7D,cAAM,KAAK,MAAM,MAAM,IAAI;AAAA,MAC/B;AACA,WAAK,CAAC,IAAI,KAAK,KAAK,aAAa;AAAA,IACrC;AACA,WAAO,IAAI,OAAO,KAAK,MAAM,MAAM,aAAa;AAAA,EAEpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,WAAW,MAAM;AACb,WAAO,QAAQ,MAAM,IAAI;AAAA,EAC7B;AAAA;AAAA,EAGA,aAAa,MAAM;AACf,WAAO,KAAK,QAAQ,GAAG,IAAI;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,IAAI,MAAM,MAAM,UAAU,OAAO;AAC7B,WAAO,KAAK,KAAK,GAAG,KAAK,OAAO;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,KAAK,IAAI,OAAO,MAAM,MAAM,UAAU,OAAO;AACzC,QAAI,MAAM,OAAO;AAEb,UAAI;AAAA,IACR,WAAW,OAAO,MAAM,UAAU;AAC9B,YAAM,MAAM,qBAAqB,CAAC,EAAE;AAAA,IACxC;AAEA,QAAI,QAAQ,MAAM;AAEd,UAAI,MAAM,KAAK,KAAK,OAAO,CAAC,GAAG,MAAM,IAAK,KAAK,GAAI,CAAC,MAAM,IAAI;AAC9D,aAAO,IAAI,OAAO,KAAK,MAAM,CAAC,GAAG,GAAG,EAAE;AAAA,IAC1C;AAGA,UAAM,UAAU,KAAK,KAAK,KAAK,MAAM;AAGrC,UAAM,aAAa,KAAK,KAAK,MAAK;AAClC,eAAW,GAAG,IAAI;AAIlB,UAAM,SAAS,IAAI,KAAK,KAAK,YAAY,KAAK,KAAK,SAAS,KAAK,KAAK,GAAG,CAAC;AAG1E,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AAGvC,UAAI,cAAc;AAElB,eAAS,IAAI,KAAK,KAAK,SAAS,GAAG,MAAM,GAAG,mBAAmB,GAAG,KAAK,GAAG,EAAE,GAAG;AAC3E,cAAM,OAAO,KAAK,KAAK,CAAC;AACxB,YAAI,MAAM,KAAK;AACX,gBAAM,QAAQ,MAAM;AACpB,yBAAe,QAAQ;AACvB,8BAAoB,WAAW,CAAC;AAAA,QACpC;AACA,cAAM,KAAK,MAAM,MAAM,IAAI;AAAA,MAC/B;AAGA,aAAO,WAAW,KAAM,KAAK,KAAK,CAAC,KAAM;AAAA,IAC7C;AAEA,QAAI,MAAM,GAAG;AACT,eAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,EAAE,GAAG;AACpC,eAAO,CAAC,IAAI,OAAO,CAAC,MAAM,IAAI;AAAA,MAClC;AAAA,IACJ;AAEA,QAAI,CAAC,SAAS;AACV,iBAAW,OAAO,KAAK,CAAC;AAAA,IAC5B;AAEA,WAAO,IAAI,OAAO,KAAK,MAAM,QAAQ,UAAU;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,WAAW,IAAI,GAAK,MAAM,GAAG;AACzB,UAAM,UAAU,KAAK,KAAK,KAAK,MAAM;AAErC,UAAM,OAAO,KAAK,KAAK,GAAG,KAAK,IAAI;AAEnC,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AAGvC,UAAI,cAAc;AAElB,eAAS,IAAI,KAAK,KAAK,SAAS,GAAG,MAAM,GAAG,mBAAmB,GAAG,KAAK,GAAG,EAAE,GAAG;AAC3E,cAAM,OAAO,KAAK,KAAK,CAAC;AACxB,YAAI,MAAM,KAAK;AACX,gBAAM,QAAQ,MAAM;AACpB,yBAAe,QAAQ;AACvB,8BAAoB,KAAK,KAAK,CAAC;AAAA,QACnC;AACA,cAAM,KAAK,MAAM,MAAM,IAAI;AAAA,MAC/B;AAGA,WAAK,KAAK,CAAC,KAAK,KAAK,KAAK,WAAW;AAAA,IACzC;AAEA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,IAAI,GAAK,MAAM,GAAG;AACxB,WAAO,KAAK,MAAK,EAAG,WAAW,GAAG,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS;AACL,WAAO,aAAa,KAAK,IAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,MAAM,MAAM;AAChB,WAAO,IAAI;AAAA,MACP,KAAK;AAAA,MACL,KAAK;AAAA,MACL,kBAAkB,KAAK,MAAM,GAAG;AAAA,IAC5C;AAAA,EACI;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,MAAM,MA
AM;AACjB,SAAK,OAAO,kBAAkB,KAAK,MAAM,GAAG;AAC5C,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,UAAU,MAAM,MAAM;AAClB,WAAO,IAAI;AAAA,MACP,KAAK;AAAA,MACL,KAAK;AAAA,MACL,oBAAoB,KAAK,MAAM,GAAG;AAAA,IAC9C;AAAA,EACI;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,MAAM,MAAM;AACnB,SAAK,OAAO,oBAAoB,KAAK,MAAM,GAAG;AAC9C,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,YAAY,GAAG,UAAU,IAAI;AAElC,eAAW,UAAU,KAAK,KAAK,UAAU,KAAK,KAAK;AAEnD,QAAI,mBAAmB,KAAK,KAAK,MAAM,GAAG,SAAS;AACnD,QAAI,gBAAgB,KAAK,KAAK,MAAM,WAAW,UAAU,CAAC;AAC1D,QAAI,kBAAkB,KAAK,KAAK,MAAM,UAAU,CAAC;AAEjD,SAAK,OAAO,CAAC,GAAG,kBAAkB,cAAc,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC,GAAG,GAAG,eAAe;AAC9F,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,QAAQ,YAAY,GAAG,UAAU,IAAI;AACjC,WAAO,KAAK,MAAK,EAAG,SAAS,WAAW,OAAO;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,MAAM;AAEV,QAAI,gBAAgB;AACpB,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,EAAE,GAAG;AAClC,UAAI,KAAK,CAAC,MAAM,IAAI;AAChB,YAAI,kBAAkB,IAAI;AACtB,gBAAM,IAAI,MAAM,oCAAoC;AAAA,QACxD;AACA,wBAAgB;AAAA,MACpB;AAAA,IACJ;AAEA,QAAI,kBAAkB,IAAI;AAEtB,YAAM,eAAe,KAAK,OAAO,CAAC,SAAS,MAAM,UAAU;AACvD,eAAO,UAAU,gBAAgB,UAAU,OAAO;AAAA,MACtD,GAAG,CAAC;AAEJ,WAAK,aAAa,IAAI,KAAK,KAAK,SAAS;AAAA,IAC7C;AACA,WAAO,IAAI,OAAO,KAAK,MAAM,KAAK,MAAM,IAAI;AAAA,EAChD;AAAA,EAEA,OAAO;AACH,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,IAAI,CAAC,KAAK,KAAK,CAAC;AAAA,IAC/B;AACA,WAAO;AAAA,EACX;AAAA,EACA,MAAM;AACF,WAAO,KAAK,MAAK,EAAG,KAAI;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,KAAK,KAAK;AACb,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI,KAAK,KAAK,CAAC,GAAG,GAAG,GAAG,GAAG;AAAA,IAC5D;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,KAAK;AACZ,WAAO,KAAK,MAAK,EAAG,OAAO,KAAK,GAAG;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS;AACL,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE,GAAG;AACvC,WAAK,KAAK,CAAC,IAAI,KAAK,MAAM,KAAK,KAAK,CAAC,CAAC;AAAA,IAC1C;AACA,WAAO;AAAA,EACX;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAQ;AACJ,WAAO,KAAK,MAAK,EAAG,OAAM;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,GAAG,MAAM;AAEL,QAAI,KAAK,SAAS,KAAM,QAAO;AAG/B,QAAI,CAAC,YAAY,eAAe,IAAI,GAAG;AACnC,YAAM,IAAI,MAAM,qBAAqB,IAAI,EAAE;AAAA,IAC/C;AAEA,WAAO,IAAI,OAAO,MAAM,YAAY,IAAI,EAAE,KAAK,KAAK,IAAI,GAAG,KAAK,IAAI;AAAA,EACxE;AACJ;AA+BA,SAAS,QAAQ,MAAM,YAAY;AAE/B,QAAM,gBAAgB,KAAK;AAC3B,QAAM,gBAAgB,WAAW,OAAO,CAAC,GAAG,MAAM,IAAI,CAAC;AAEvD,MAAI,kBAAkB,eAAe;AACjC,UAAM,MAAM,gCAAgC,aAAa,gBAAgB,UAAU,GAAG;AAAA,EAC1F;AAGA,MAAI,gBAAgB;AAEpB,WAAS,IAAI,WAAW,SAAS,GAAG,KAAK,GAAG,KAAK;AAC7C,oBAAgB,cAAc,OAAO,CAAC,KAAK,QAAQ;AAC/C,UAAI,YAAY,IAAI,IAAI,SAAS,CAAC;AAElC,UAAI,UAAU,SAAS,WAAW,CAAC,GAAG;AAClC,kBAAU,KAAK,GAAG;AAAA,MACtB,OAAO;AACH,YAAI,KAAK,CAAC,GAAG,CAAC;AAAA,MAClB;AAEA,aAAO;AAAA,IACX,GAAG,CAAC,CAAA,CAAE,CAAC;AAAA,EACX;AAEA,SAAO,cAAc,CAAC;AAC1B;AAQO,SAAS,QAAQ,QAAQ,MAAM;AAClC,QAAM,CAAC,cAAc,KAAK,IAAI,aAAa,OAAO,MAAM,OAAO,MAAM,IAAI;AACzE,SAAO,IAAI,OAAO,OAAO,MAAM,cAAc,KAAK;AACtD;AAqHA,SAAS,kBAAkB,MAAM,KAAK;AAClC,SAAO,KAAK,MAAK;AACjB,MAAI,QAAQ,MAAM;AACd,WAAO,KAAK,OAAO,CAAC,MAAM,MAAM,CAAC;AAAA,EACrC,WAAW,OAAO,QAAQ,UAAU;AAChC,QAAI,KAAK,GAAG,MAAM,GAAG;AACjB,WAAK,OAAO,KAAK,CAAC;AAAA,IACtB;AAAA,EACJ,WAAW,MAAM,QAAQ,GAAG,GAAG;AAC3B,WAAO,KAAK,OAAO,CAAC,GAAG,MAAM;AACzB,aAAO,MAAM,KAAK,CAAC,IAAI,SAAS,CAAC;AAAA,IACrC,CAAC;AAAA,EACL;AACA,SAAO;AACX;AASA,SAAS,oBAAoB,MAAM,KAAK;AAGpC,QAAM,UAAU,KAAK,KAAK,SAAS,CAAC;AACpC,SAAO,KAAK,MAAK;AAEjB,OAAK,OAAO,KAAK,GAAG,CAAC;AACrB,SAAO;AACX;AAYA,SAAS,UAAU,OAAO,MAAM,YAAY,MAAM;AAC9C,MAAI,QAAQ,CAAC,QAAQ,SAAS,MAAM;AAChC,UAAM,IAAI,MAAM,qBAAqB,KAAK,kCAAkC,cAAc,OAAO,KAAK,MAAM,SAAS,cAAc,IAAI,EAAE;AAAA,EAC7I;AAEA,MAAI,QAA
Q,GAAG;AAEX,aAAU,QAAQ,OAAQ,QAAQ;AAAA,EACtC;AACA,SAAO;AACX;AAQO,SAAS,IAAI,SAAS,MAAM,GAAG;AAClC,QAAM,UAAU,KAAK,QAAQ,CAAC,EAAE,KAAK,MAAM;AAI3C,QAAM,aAAa,QAAQ,CAAC,EAAE,KAAK,MAAK;AACxC,aAAW,GAAG,IAAI,QAAQ,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,KAAK,GAAG,GAAG,CAAC;AAG7D,QAAM,aAAa,WAAW,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAEvD,QAAM,SAAS,IAAI,QAAQ,CAAC,EAAE,KAAK,YAAY,UAAU;AAGzD,QAAM,aAAa,QAAQ,CAAC,EAAE;AAE9B,MAAI,QAAQ,GAAG;AAGX,QAAI,SAAS;AACb,aAAS,KAAK,SAAS;AACnB,aAAO,IAAI,EAAE,MAAM,MAAM;AACzB,gBAAU,EAAE,KAAK;AAAA,IACrB;AAAA,EAEJ,OAAO;AAEH,QAAI,aAAa;AAEjB,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,EAAE,GAAG;AACrC,UAAI,SAAS,QAAQ,CAAC;AAGtB,eAAS,IAAI,GAAG,IAAI,OAAO,KAAK,QAAQ,EAAE,GAAG;AAEzC,YAAI,cAAc;AAElB,iBAAS,IAAI,OAAO,KAAK,SAAS,GAAG,MAAM,GAAG,mBAAmB,GAAG,KAAK,GAAG,EAAE,GAAG;AAC7E,gBAAM,OAAO,OAAO,KAAK,CAAC;AAC1B,cAAI,QAAQ,MAAM;AAClB,cAAI,MAAM,KAAK;AACX,qBAAS;AAAA,UACb;AACA,yBAAe,QAAQ;AACvB,8BAAoB,WAAW,CAAC;AAChC,gBAAM,KAAK,MAAM,MAAM,IAAI;AAAA,QAC/B;AAEA,eAAO,WAAW,IAAI,OAAO,KAAK,CAAC;AAAA,MACvC;AAEA,oBAAc,OAAO,KAAK,GAAG;AAAA,IACjC;AAAA,EACJ;AACA,SAAO,IAAI,OAAO,YAAY,QAAQ,UAAU;AACpD;AAQO,SAAS,MAAM,SAAS,MAAM,GAAG;AAGpC,SAAO,IAAI,QAAQ,IAAI,OAAK,EAAE,UAAU,GAAG,CAAC,GAAG,GAAG;AACtD;AAWO,SAAS,SAAS,OAAO,MAAM,MAAM,aAAa,GAAG,UAAU,OAAO;AAEzE,MAAI,QAAQ,MAAM;AAGd,UAAM,MAAM,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAChD,UAAMA,QAAO,MAAM,MAAM,KAAK;AAE9B,UAAM,MAAM,KAAK,KAAK,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,KAAK,IAAIA,UAAS,GAAG,CAAC,KAAK,MAAM,KAAK,SAAS,WAAW;AAE5G,UAAMC,cAAa,IAAI,OAAO,MAAM,MAAM,CAACD,KAAI,GAAG;AAAA;AAAA,KAAc;AAChE,UAAME,aAAY,IAAI,OAAO,MAAM,MAAM,CAAC,GAAG,GAAG;AAAA;AAAA,KAAc;AAE9D,WAAO,CAACA,YAAWD,WAAU;AAAA,EACjC;AAGA,QAAM,UAAU,KAAK,MAAM,KAAK,MAAM;AAEtC,QAAM,aAAa,KAAK,OAAO,KAAK,OAAO;AAG3C,QAAM,aAAa,MAAM,KAAK,MAAK;AACnC,aAAW,GAAG,IAAI;AAIlB,QAAM,SAAS,IAAI,MAAM,KAAK,YAAY,MAAM,KAAK,SAAS,MAAM,KAAK,GAAG,CAAC;AAG7E,WAAS,IAAI,GAAG,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG;AAGxC,QAAI,cAAc;AAElB,aAAS,IAAI,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,mBAAmB,GAAG,KAAK,GAAG,EAAE,GAAG;AAC5E,YAAM,OAAO,MAAM,KAAK,CAAC;AACzB,UAAI,MAAM,KAAK;AACX,cAAM,QAAQ,MAAM;AACpB,uBAAe,QAAQ;AACvB,4BAAoB,WAAW,CAAC;AAAA,MACpC;AACA,YAAM,KAAK,MAAM,MAAM,IAAI;AAAA,IAC/B;AAGA,WAAO,WAAW,MAAM,MAAM,KAAK,CAAC,IAAI,WAAW,KAAK,WAAW,MAAM;AAAA,EAC7E;AAEA,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,EAAE,GAAG;AACpC,WAAO,CAAC,IAAI,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,KAAK,GAAG,IAAI,WAAW;AAAA,EACpE;AAEA,MAAI,CAAC,SAAS;AACV,eAAW,OAAO,KAAK,CAAC;AAAA,EAC5B;AAEA,QAAM,YAAY,IAAI,OAAO,MAAM,MAAM,QAAQ,UAAU;AAE3D,SAAO,CAAC,WAAW,UAAU;AACjC;AAUO,SAAS,KAAK,OAAO,MAAM,MAAM,UAAU,OAAO;AAErD,MAAI,QAAQ,MAAM;AAGd,QAAI,MAAM,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAC9C,WAAO,IAAI,OAAO,MAAM,MAAM,CAAC,MAAM,MAAM,KAAK,MAAM,GAAG;AAAA;AAAA,KAAc;AAAA,EAC3E;AAGA,QAAM,UAAU,KAAK,MAAM,KAAK,MAAM;AAGtC,QAAM,aAAa,MAAM,KAAK,MAAK;AACnC,aAAW,GAAG,IAAI;AAIlB,QAAM,SAAS,IAAI,MAAM,KAAK,YAAY,MAAM,KAAK,SAAS,MAAM,KAAK,GAAG,CAAC;AAG7E,WAAS,IAAI,GAAG,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG;AAGxC,QAAI,cAAc;AAElB,aAAS,IAAI,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,mBAAmB,GAAG,KAAK,GAAG,EAAE,GAAG;AAC5E,YAAM,OAAO,MAAM,KAAK,CAAC;AACzB,UAAI,MAAM,KAAK;AACX,cAAM,QAAQ,MAAM;AACpB,uBAAe,QAAQ;AACvB,4BAAoB,WAAW,CAAC;AAAA,MACpC;AACA,YAAM,KAAK,MAAM,MAAM,IAAI;AAAA,IAC/B;AAGA,WAAO,WAAW,KAAK,MAAM,KAAK,CAAC;AAAA,EACvC;AAEA,MAAI,MAAM,KAAK,GAAG,MAAM,GAAG;AACvB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,EAAE,GAAG;AACpC,aAAO,CAAC,IAAI,OAAO,CAAC,IAAI,MAAM,KAAK,GAAG;AAAA,IAC1C;AAAA,EACJ;AAEA,MAAI,CAAC,SAAS;AACV,eAAW,OAAO,KAAK,CAAC;AAAA,EAC5B;AAEA,SAAO,IAAI,OAAO,MAAM,MAAM,QAAQ,UAAU;AACpD;AAUO,SAAS,mBAAmB,QAAQ;AACvC,QAAM,CAAC,eAAe,YAAY,IAAI,OAAO;AAE7C,QAAM,cAAc,CAAC,gBAAgB,GAAG,eAAe,CAAC;AAExD,QAAM,OAAO,IAAI;AAAA,IACb;AAAA
,IACA,IAAI,aAAa,YAAY,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,KAAK,QAAQ;AAAA,IAC/D;AAAA,EACR;AAEI,QAAM,QAAQ,IAAI;AAAA,IACd;AAAA,IACA,IAAI,aAAa,YAAY,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,KAAK,EAAE;AAAA,IACzD;AAAA,EACR;AAGI,OAAK,CAAC,EAAE,KAAK,CAAC,IAAI;AAElB,WAASE,KAAI,GAAGA,KAAI,eAAe,GAAG,EAAEA,IAAG;AACvC,aAASC,KAAI,GAAGA,KAAI,gBAAgB,GAAG,EAAEA,IAAG;AAExC,YAAM,KAAK,KAAKA,KAAI,CAAC,EAAED,KAAI,CAAC,EAAE,KAAI;AAClC,YAAM,KAAK,KAAKC,KAAI,CAAC,EAAED,EAAC,EAAE,KAAI;AAC9B,YAAM,KAAK,KAAKC,EAAC,EAAED,KAAI,CAAC,EAAE,KAAI;AAE9B,UAAI,GAAG;AACP,UAAI,KAAK,MAAM,KAAK,IAAI;AACpB,YAAI;AACJ,YAAI;AAAA,MACR,WAAW,KAAK,MAAM,KAAK,IAAI;AAC3B,YAAI;AACJ,YAAI;AAAA,MACR,OAAO;AACH,YAAI;AACJ,YAAI;AAAA,MACR;AAEA,WAAKC,EAAC,EAAE,KAAKD,EAAC,IAAI,OAAOC,KAAI,CAAC,EAAED,KAAI,CAAC,EAAE,KAAI,IAAK;AAChD,YAAMC,EAAC,EAAE,KAAKD,EAAC,IAAI;AAAA,IACvB;AAAA,EACJ;AAGA,MAAI,IAAI;AACR,MAAI,IAAI;AAGR,QAAM,KAAK,KAAK,GAAG,GAAG,YAAY,CAAC,CAAC;AACpC,WAASC,KAAI,GAAGA,KAAI,YAAY,CAAC,GAAG,EAAEA,IAAG;AACrC,UAAMA,EAAC,EAAE,KAAK,CAAC,IAAI;AAAA,EACvB;AAEA,MAAI,eAAe,CAAA;AACnB,MAAI,eAAe,CAAA;AAEnB,SAAO,IAAI,KAAK,IAAI,GAAG;AACnB,iBAAa,KAAK,IAAI,CAAC;AACvB,iBAAa,KAAK,IAAI,CAAC;AAEvB,UAAM,IAAI,MAAM,CAAC,EAAE,CAAC,EAAE,KAAI;AAC1B,YAAQ,GAAC;AAAA,MACL,KAAK;AACD,UAAE;AAAG,UAAE;AACP;AAAA,MACJ,KAAK;AACD,UAAE;AACF;AAAA,MACJ,KAAK;AACD,UAAE;AACF;AAAA,MACJ;AACI,cAAM,IAAI;AAAA,UACN,4DAA4D,CAAC,KAAK,CAAC;AAAA,QACvF;AAAA,IACA;AAAA,EACI;AAEA,eAAa,QAAO;AACpB,eAAa,QAAO;AAEpB,SAAO,CAAC,cAAc,YAAY;AAEtC;AAEA,SAAS,aAAa,MAAM;AACxB,QAAM,SAAS,IAAI,MAAM,KAAK,MAAM;AACpC,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG,EAAE,GAAG;AAC/C,WAAO,CAAC,IAAI;AACZ,UAAM,KAAK,CAAC;AAAA,EAChB;AACA,SAAO;AACX;AAMO,SAAS,KAAK,MAAM;AACvB,QAAM,cAAc,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAClD,SAAO,IAAI;AAAA,IACP;AAAA,IACA,IAAI,cAAc,WAAW,EAAE,KAAK,EAAE;AAAA,IACtC;AAAA,EACR;AACA;AAOO,SAAS,UAAU,QAAQ;AAC9B,SAAO,KAAK,OAAO,IAAI;AAC3B;","x_google_ignoreList":[0]}
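The source map deleted above inlines the full source of the @xenova/transformers tensor utilities (`Tensor`, `mean_pooling`, `normalize`, and friends) that were previously bundled into `dist`. As a rough sketch of what that removed code does, here is a hypothetical example of pooling a model's `last_hidden_state` into L2-normalized embeddings; the import path and the toy shapes are assumptions for illustration, not code from this package.

```js
// Illustrative sketch of the tensor helpers whose source map is deleted above.
// Import path and shapes are hypothetical, not taken from this package.
import { Tensor, mean_pooling } from "@xenova/transformers/src/utils/tensor.js";

// last_hidden_state: [batchSize, seqLength, embedDim]; attention_mask: [batchSize, seqLength]
const last_hidden_state = new Tensor("float32", new Float32Array(1 * 2 * 4).fill(0.5), [1, 2, 4]);
const attention_mask = new Tensor("int64", new BigInt64Array([1n, 1n]), [1, 2]);

// Average the unmasked token vectors, then L2-normalize each row -> shape [1, 4]
const embeddings = mean_pooling(last_hidden_state, attention_mask).normalize(2.0, 1);
```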
@@ -1,67 +0,0 @@
- const backends = {};
- const backendsSortedByPriority = [];
- const registerBackend = (name, backend, priority) => {
- if (backend && typeof backend.init === "function" && typeof backend.createSessionHandler === "function") {
- const currentBackend = backends[name];
- if (currentBackend === void 0) {
- backends[name] = { backend, priority };
- } else if (currentBackend.priority > priority) {
- return;
- } else if (currentBackend.priority === priority) {
- if (currentBackend.backend !== backend) {
- throw new Error(`cannot register backend "${name}" using priority ${priority}`);
- }
- }
- if (priority >= 0) {
- const i = backendsSortedByPriority.indexOf(name);
- if (i !== -1) {
- backendsSortedByPriority.splice(i, 1);
- }
- for (let i2 = 0; i2 < backendsSortedByPriority.length; i2++) {
- if (backends[backendsSortedByPriority[i2]].priority <= priority) {
- backendsSortedByPriority.splice(i2, 0, name);
- return;
- }
- }
- backendsSortedByPriority.push(name);
- }
- return;
- }
- throw new TypeError("not a valid backend");
- };
- const resolveBackend = async (backendHints) => {
- const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;
- const errors = [];
- for (const backendName of backendNames) {
- const backendInfo = backends[backendName];
- if (backendInfo) {
- if (backendInfo.initialized) {
- return backendInfo.backend;
- } else if (backendInfo.aborted) {
- continue;
- }
- const isInitializing = !!backendInfo.initPromise;
- try {
- if (!isInitializing) {
- backendInfo.initPromise = backendInfo.backend.init();
- }
- await backendInfo.initPromise;
- backendInfo.initialized = true;
- return backendInfo.backend;
- } catch (e) {
- if (!isInitializing) {
- errors.push({ name: backendName, err: e });
- }
- backendInfo.aborted = true;
- } finally {
- delete backendInfo.initPromise;
- }
- }
- }
- throw new Error(`no available backend found. ERR: ${errors.map((e) => `[${e.name}] ${e.err}`).join(", ")}`);
- };
- export {
- registerBackend,
- resolveBackend
- };
- //# sourceMappingURL=backend-impl.js.map
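Note: the hunk above drops the bundled copy of onnxruntime-common's backend registry. For context, a minimal sketch of how that registry is typically consumed is shown below; the `myWasmBackend` object and its stub methods are hypothetical and not part of this package.

    // Illustrative only: register a backend, then resolve one by hint.
    // registerBackend / resolveBackend correspond to the removed backend-impl.js exports.
    import { registerBackend, resolveBackend } from "./backend-impl.js";

    const myWasmBackend = {
      // init() must resolve before the backend is considered usable.
      async init() { /* load runtime assets here */ },
      // createSessionHandler() is what InferenceSession.create() ultimately calls.
      async createSessionHandler(pathOrBuffer, options) {
        throw new Error("stub for illustration only");
      },
    };

    // Higher priority wins; a negative priority keeps a backend out of the default fallback list.
    registerBackend("wasm", myWasmBackend, 10);

    // With no hints, resolution walks registered backends in descending priority order
    // and awaits each backend's init() before returning it.
    const backend = await resolveBackend([]);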
@@ -1 +0,0 @@
- {"version":3,"file":"backend-impl.js","sources":["../../../../../node_modules/onnxruntime-common/dist/lib/backend-impl.js"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nconst backends = {};\nconst backendsSortedByPriority = [];\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @internal\n */\nexport const registerBackend = (name, backend, priority) => {\n if (backend && typeof backend.init === 'function' && typeof backend.createSessionHandler === 'function') {\n const currentBackend = backends[name];\n if (currentBackend === undefined) {\n backends[name] = { backend, priority };\n }\n else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. skip registeration.\n return;\n }\n else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends[backendsSortedByPriority[i]].priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n throw new TypeError('not a valid backend');\n};\n/**\n * Resolve backend by specified hints.\n *\n * @param backendHints - a list of execution provider names to lookup. If omitted use registered backends as list.\n * @returns a promise that resolves to the backend.\n *\n * @internal\n */\nexport const resolveBackend = async (backendHints) => {\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n const errors = [];\n for (const backendName of backendNames) {\n const backendInfo = backends[backendName];\n if (backendInfo) {\n if (backendInfo.initialized) {\n return backendInfo.backend;\n }\n else if (backendInfo.aborted) {\n continue; // current backend is unavailable; try next\n }\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init();\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n }\n catch (e) {\n if (!isInitializing) {\n errors.push({ name: backendName, err: e });\n }\n backendInfo.aborted = true;\n }\n finally {\n delete backendInfo.initPromise;\n }\n }\n }\n throw new Error(`no available backend found. 
ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n};\n//# sourceMappingURL=backend-impl.js.map"],"names":["i"],"mappings":"AAEA,MAAM,WAAW,CAAA;AACjB,MAAM,2BAA2B,CAAA;AAWrB,MAAC,kBAAkB,CAAC,MAAM,SAAS,aAAa;AACxD,MAAI,WAAW,OAAO,QAAQ,SAAS,cAAc,OAAO,QAAQ,yBAAyB,YAAY;AACrG,UAAM,iBAAiB,SAAS,IAAI;AACpC,QAAI,mBAAmB,QAAW;AAC9B,eAAS,IAAI,IAAI,EAAE,SAAS,SAAQ;AAAA,IACxC,WACS,eAAe,WAAW,UAAU;AAEzC;AAAA,IACJ,WACS,eAAe,aAAa,UAAU;AAC3C,UAAI,eAAe,YAAY,SAAS;AACpC,cAAM,IAAI,MAAM,4BAA4B,IAAI,oBAAoB,QAAQ,EAAE;AAAA,MAClF;AAAA,IACJ;AACA,QAAI,YAAY,GAAG;AACf,YAAM,IAAI,yBAAyB,QAAQ,IAAI;AAC/C,UAAI,MAAM,IAAI;AACV,iCAAyB,OAAO,GAAG,CAAC;AAAA,MACxC;AACA,eAASA,KAAI,GAAGA,KAAI,yBAAyB,QAAQA,MAAK;AACtD,YAAI,SAAS,yBAAyBA,EAAC,CAAC,EAAE,YAAY,UAAU;AAC5D,mCAAyB,OAAOA,IAAG,GAAG,IAAI;AAC1C;AAAA,QACJ;AAAA,MACJ;AACA,+BAAyB,KAAK,IAAI;AAAA,IACtC;AACA;AAAA,EACJ;AACA,QAAM,IAAI,UAAU,qBAAqB;AAC7C;AASY,MAAC,iBAAiB,OAAO,iBAAiB;AAClD,QAAM,eAAe,aAAa,WAAW,IAAI,2BAA2B;AAC5E,QAAM,SAAS,CAAA;AACf,aAAW,eAAe,cAAc;AACpC,UAAM,cAAc,SAAS,WAAW;AACxC,QAAI,aAAa;AACb,UAAI,YAAY,aAAa;AACzB,eAAO,YAAY;AAAA,MACvB,WACS,YAAY,SAAS;AAC1B;AAAA,MACJ;AACA,YAAM,iBAAiB,CAAC,CAAC,YAAY;AACrC,UAAI;AACA,YAAI,CAAC,gBAAgB;AACjB,sBAAY,cAAc,YAAY,QAAQ,KAAI;AAAA,QACtD;AACA,cAAM,YAAY;AAClB,oBAAY,cAAc;AAC1B,eAAO,YAAY;AAAA,MACvB,SACO,GAAG;AACN,YAAI,CAAC,gBAAgB;AACjB,iBAAO,KAAK,EAAE,MAAM,aAAa,KAAK,GAAG;AAAA,QAC7C;AACA,oBAAY,UAAU;AAAA,MAC1B,UACZ;AACgB,eAAO,YAAY;AAAA,MACvB;AAAA,IACJ;AAAA,EACJ;AACA,QAAM,IAAI,MAAM,oCAAoC,OAAO,IAAI,OAAK,IAAI,EAAE,IAAI,KAAK,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE;AAC5G;","x_google_ignoreList":[0]}
@@ -1,24 +0,0 @@
- class EnvImpl {
- constructor() {
- this.wasm = {};
- this.webgl = {};
- this.logLevelInternal = "warning";
- }
- // TODO standadize the getter and setter convention in env for other fields.
- set logLevel(value) {
- if (value === void 0) {
- return;
- }
- if (typeof value !== "string" || ["verbose", "info", "warning", "error", "fatal"].indexOf(value) === -1) {
- throw new Error(`Unsupported logging level: ${value}`);
- }
- this.logLevelInternal = value;
- }
- get logLevel() {
- return this.logLevelInternal;
- }
- }
- export {
- EnvImpl
- };
- //# sourceMappingURL=env-impl.js.map
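Note: the EnvImpl removed above only holds the wasm/webgl flag bags and a validated logLevel accessor. A short usage sketch follows, assuming the import path of the removed bundled file:

    // Illustrative only: the logLevel setter accepts verbose|info|warning|error|fatal.
    import { EnvImpl } from "./env-impl.js";

    const env = new EnvImpl();
    env.logLevel = "verbose";   // accepted
    console.log(env.logLevel);  // "verbose"

    try {
      env.logLevel = "debug";   // not in the allowed list
    } catch (e) {
      console.log(e.message);   // "Unsupported logging level: debug"
    }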
@@ -1 +0,0 @@
- {"version":3,"file":"env-impl.js","sources":["../../../../../node_modules/onnxruntime-common/dist/lib/env-impl.js"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nexport class EnvImpl {\n constructor() {\n this.wasm = {};\n this.webgl = {};\n this.logLevelInternal = 'warning';\n }\n // TODO standadize the getter and setter convention in env for other fields.\n set logLevel(value) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n this.logLevelInternal = value;\n }\n get logLevel() {\n return this.logLevelInternal;\n }\n}\n//# sourceMappingURL=env-impl.js.map"],"names":[],"mappings":"AAEO,MAAM,QAAQ;AAAA,EACjB,cAAc;AACV,SAAK,OAAO,CAAA;AACZ,SAAK,QAAQ,CAAA;AACb,SAAK,mBAAmB;AAAA,EAC5B;AAAA;AAAA,EAEA,IAAI,SAAS,OAAO;AAChB,QAAI,UAAU,QAAW;AACrB;AAAA,IACJ;AACA,QAAI,OAAO,UAAU,YAAY,CAAC,WAAW,QAAQ,WAAW,SAAS,OAAO,EAAE,QAAQ,KAAK,MAAM,IAAI;AACrG,YAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,IACzD;AACA,SAAK,mBAAmB;AAAA,EAC5B;AAAA,EACA,IAAI,WAAW;AACX,WAAO,KAAK;AAAA,EAChB;AACJ;","x_google_ignoreList":[0]}
@@ -1,6 +0,0 @@
- import { EnvImpl } from "./env-impl.js";
- const env = new EnvImpl();
- export {
- env
- };
- //# sourceMappingURL=env.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"env.js","sources":["../../../../../node_modules/onnxruntime-common/dist/lib/env.js"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nimport { EnvImpl } from './env-impl';\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env = new EnvImpl();\n//# sourceMappingURL=env.js.map"],"names":[],"mappings":";AAMY,MAAC,MAAM,IAAI,QAAO;","x_google_ignoreList":[0]}
@@ -1,11 +0,0 @@
- import { registerBackend } from "./backend-impl.js";
- import { env } from "./env.js";
- import { InferenceSession } from "./inference-session.js";
- import { Tensor } from "./tensor.js";
- export {
- InferenceSession,
- Tensor,
- env,
- registerBackend
- };
- //# sourceMappingURL=index.js.map
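Note: the removed index.js above was the bundled onnxruntime-common entry point and only re-exported the four public symbols. A one-line consumer sketch, with the specifier being the bundled path rather than a published package name:

    // Illustrative only: everything above resolves through this single entry point.
    import { InferenceSession, Tensor, env, registerBackend } from "./index.js";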
@@ -1 +0,0 @@
- {"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;"}
@@ -1,162 +0,0 @@
- import { resolveBackend } from "./backend-impl.js";
- import { Tensor } from "./tensor.js";
- class InferenceSession {
- constructor(handler) {
- this.handler = handler;
- }
- async run(feeds, arg1, arg2) {
- const fetches = {};
- let options = {};
- if (typeof feeds !== "object" || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {
- throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");
- }
- let isFetchesEmpty = true;
- if (typeof arg1 === "object") {
- if (arg1 === null) {
- throw new TypeError("Unexpected argument[1]: cannot be null.");
- }
- if (arg1 instanceof Tensor) {
- throw new TypeError("'fetches' cannot be a Tensor");
- }
- if (Array.isArray(arg1)) {
- if (arg1.length === 0) {
- throw new TypeError("'fetches' cannot be an empty array.");
- }
- isFetchesEmpty = false;
- for (const name of arg1) {
- if (typeof name !== "string") {
- throw new TypeError("'fetches' must be a string array or an object.");
- }
- if (this.outputNames.indexOf(name) === -1) {
- throw new RangeError(`'fetches' contains invalid output name: ${name}.`);
- }
- fetches[name] = null;
- }
- if (typeof arg2 === "object" && arg2 !== null) {
- options = arg2;
- } else if (typeof arg2 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- } else {
- let isFetches = false;
- const arg1Keys = Object.getOwnPropertyNames(arg1);
- for (const name of this.outputNames) {
- if (arg1Keys.indexOf(name) !== -1) {
- const v = arg1[name];
- if (v === null || v instanceof Tensor) {
- isFetches = true;
- isFetchesEmpty = false;
- fetches[name] = v;
- }
- }
- }
- if (isFetches) {
- if (typeof arg2 === "object" && arg2 !== null) {
- options = arg2;
- } else if (typeof arg2 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- } else {
- options = arg1;
- }
- }
- } else if (typeof arg1 !== "undefined") {
- throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");
- }
- for (const name of this.inputNames) {
- if (typeof feeds[name] === "undefined") {
- throw new Error(`input '${name}' is missing in 'feeds'.`);
- }
- }
- if (isFetchesEmpty) {
- for (const name of this.outputNames) {
- fetches[name] = null;
- }
- }
- const results = await this.handler.run(feeds, fetches, options);
- const returnValue = {};
- for (const key in results) {
- if (Object.hasOwnProperty.call(results, key)) {
- returnValue[key] = new Tensor(results[key].type, results[key].data, results[key].dims);
- }
- }
- return returnValue;
- }
- static async create(arg0, arg1, arg2, arg3) {
- let filePathOrUint8Array;
- let options = {};
- if (typeof arg0 === "string") {
- filePathOrUint8Array = arg0;
- if (typeof arg1 === "object" && arg1 !== null) {
- options = arg1;
- } else if (typeof arg1 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- } else if (arg0 instanceof Uint8Array) {
- filePathOrUint8Array = arg0;
- if (typeof arg1 === "object" && arg1 !== null) {
- options = arg1;
- } else if (typeof arg1 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- } else if (arg0 instanceof ArrayBuffer || typeof SharedArrayBuffer !== "undefined" && arg0 instanceof SharedArrayBuffer) {
- const buffer = arg0;
- let byteOffset = 0;
- let byteLength = arg0.byteLength;
- if (typeof arg1 === "object" && arg1 !== null) {
- options = arg1;
- } else if (typeof arg1 === "number") {
- byteOffset = arg1;
- if (!Number.isSafeInteger(byteOffset)) {
- throw new RangeError("'byteOffset' must be an integer.");
- }
- if (byteOffset < 0 || byteOffset >= buffer.byteLength) {
- throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);
- }
- byteLength = arg0.byteLength - byteOffset;
- if (typeof arg2 === "number") {
- byteLength = arg2;
- if (!Number.isSafeInteger(byteLength)) {
- throw new RangeError("'byteLength' must be an integer.");
- }
- if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {
- throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);
- }
- if (typeof arg3 === "object" && arg3 !== null) {
- options = arg3;
- } else if (typeof arg3 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- } else if (typeof arg2 !== "undefined") {
- throw new TypeError("'byteLength' must be a number.");
- }
- } else if (typeof arg1 !== "undefined") {
- throw new TypeError("'options' must be an object.");
- }
- filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);
- } else {
- throw new TypeError("Unexpected argument[0]: must be 'path' or 'buffer'.");
- }
- const eps = options.executionProviders || [];
- const backendHints = eps.map((i) => typeof i === "string" ? i : i.name);
- const backend = await resolveBackend(backendHints);
- const handler = await backend.createSessionHandler(filePathOrUint8Array, options);
- return new InferenceSession(handler);
- }
- startProfiling() {
- this.handler.startProfiling();
- }
- endProfiling() {
- this.handler.endProfiling();
- }
- get inputNames() {
- return this.handler.inputNames;
- }
- get outputNames() {
- return this.handler.outputNames;
- }
- }
- export {
- InferenceSession
- };
- //# sourceMappingURL=inference-session-impl.js.map
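Note: the largest removal in this group is the bundled InferenceSession wrapper. A minimal sketch of the call pattern it supported follows; the model path, provider name, and tensor names/shapes are invented for illustration and assume a matching backend was registered.

    // Illustrative only: create a session from a model path, then run it with named feeds.
    import { InferenceSession } from "./inference-session.js";
    import { Tensor } from "./tensor.js";

    // create() also accepts a Uint8Array or an ArrayBuffer (+ byteOffset/byteLength);
    // it resolves a backend from options.executionProviders and builds a session handler.
    const session = await InferenceSession.create("model.onnx", {
      executionProviders: ["wasm"],
    });

    // run() requires every declared input name in feeds; unspecified outputs default to null
    // and come back keyed by output name as Tensor instances.
    const feeds = { input: new Tensor("float32", new Float32Array(4), [1, 4]) };
    const results = await session.run(feeds);
    console.log(Object.keys(results));  // e.g. ["output"]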
@@ -1 +0,0 @@
- {"version":3,"file":"inference-session-impl.js","sources":["../../../../../node_modules/onnxruntime-common/dist/lib/inference-session-impl.js"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nimport { resolveBackend } from './backend-impl';\nimport { Tensor } from './tensor';\nexport class InferenceSession {\n constructor(handler) {\n this.handler = handler;\n }\n async run(feeds, arg1, arg2) {\n const fetches = {};\n let options = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError('\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n }\n else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n }\n else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = arg1[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n }\n else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n }\n else {\n options = arg1;\n }\n }\n }\n else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n // feeds, fetches and options are prepared\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n returnValue[key] = new Tensor(results[key].type, results[key].data, results[key].dims);\n }\n }\n return returnValue;\n }\n static async create(arg0, arg1, arg2, arg3) {\n // either load from a file or buffer\n let filePathOrUint8Array;\n let options = {};\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n }\n else if (typeof arg1 !== 'undefined') {\n throw 
new TypeError('\\'options\\' must be an object.');\n }\n }\n else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n }\n else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n }\n else if (arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n }\n else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n }\n else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n }\n else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n }\n else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n }\n else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n // get backend hints\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? 
i : i.name);\n const backend = await resolveBackend(backendHints);\n const handler = await backend.createSessionHandler(filePathOrUint8Array, options);\n return new InferenceSession(handler);\n }\n startProfiling() {\n this.handler.startProfiling();\n }\n endProfiling() {\n this.handler.endProfiling();\n }\n get inputNames() {\n return this.handler.inputNames;\n }\n get outputNames() {\n return this.handler.outputNames;\n }\n}\n//# sourceMappingURL=inference-session-impl.js.map"],"names":[],"mappings":";;AAIO,MAAM,iBAAiB;AAAA,EAC1B,YAAY,SAAS;AACjB,SAAK,UAAU;AAAA,EACnB;AAAA,EACA,MAAM,IAAI,OAAO,MAAM,MAAM;AACzB,UAAM,UAAU,CAAA;AAChB,QAAI,UAAU,CAAA;AAEd,QAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,iBAAiB,UAAU,MAAM,QAAQ,KAAK,GAAG;AAChG,YAAM,IAAI,UAAU,+FAAiG;AAAA,IACzH;AACA,QAAI,iBAAiB;AAErB,QAAI,OAAO,SAAS,UAAU;AAC1B,UAAI,SAAS,MAAM;AACf,cAAM,IAAI,UAAU,yCAAyC;AAAA,MACjE;AACA,UAAI,gBAAgB,QAAQ;AACxB,cAAM,IAAI,UAAU,8BAAgC;AAAA,MACxD;AACA,UAAI,MAAM,QAAQ,IAAI,GAAG;AACrB,YAAI,KAAK,WAAW,GAAG;AACnB,gBAAM,IAAI,UAAU,qCAAuC;AAAA,QAC/D;AACA,yBAAiB;AAEjB,mBAAW,QAAQ,MAAM;AACrB,cAAI,OAAO,SAAS,UAAU;AAC1B,kBAAM,IAAI,UAAU,gDAAkD;AAAA,UAC1E;AACA,cAAI,KAAK,YAAY,QAAQ,IAAI,MAAM,IAAI;AACvC,kBAAM,IAAI,WAAW,2CAA2C,IAAI,GAAG;AAAA,UAC3E;AACA,kBAAQ,IAAI,IAAI;AAAA,QACpB;AACA,YAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,oBAAU;AAAA,QACd,WACS,OAAO,SAAS,aAAa;AAClC,gBAAM,IAAI,UAAU,8BAAgC;AAAA,QACxD;AAAA,MACJ,OACK;AAGD,YAAI,YAAY;AAChB,cAAM,WAAW,OAAO,oBAAoB,IAAI;AAChD,mBAAW,QAAQ,KAAK,aAAa;AACjC,cAAI,SAAS,QAAQ,IAAI,MAAM,IAAI;AAC/B,kBAAM,IAAI,KAAK,IAAI;AACnB,gBAAI,MAAM,QAAQ,aAAa,QAAQ;AACnC,0BAAY;AACZ,+BAAiB;AACjB,sBAAQ,IAAI,IAAI;AAAA,YACpB;AAAA,UACJ;AAAA,QACJ;AACA,YAAI,WAAW;AACX,cAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,sBAAU;AAAA,UACd,WACS,OAAO,SAAS,aAAa;AAClC,kBAAM,IAAI,UAAU,8BAAgC;AAAA,UACxD;AAAA,QACJ,OACK;AACD,oBAAU;AAAA,QACd;AAAA,MACJ;AAAA,IACJ,WACS,OAAO,SAAS,aAAa;AAClC,YAAM,IAAI,UAAU,yDAA6D;AAAA,IACrF;AAEA,eAAW,QAAQ,KAAK,YAAY;AAChC,UAAI,OAAO,MAAM,IAAI,MAAM,aAAa;AACpC,cAAM,IAAI,MAAM,UAAU,IAAI,0BAA0B;AAAA,MAC5D;AAAA,IACJ;AAEA,QAAI,gBAAgB;AAChB,iBAAW,QAAQ,KAAK,aAAa;AACjC,gBAAQ,IAAI,IAAI;AAAA,MACpB;AAAA,IACJ;AAEA,UAAM,UAAU,MAAM,KAAK,QAAQ,IAAI,OAAO,SAAS,OAAO;AAC9D,UAAM,cAAc,CAAA;AACpB,eAAW,OAAO,SAAS;AACvB,UAAI,OAAO,eAAe,KAAK,SAAS,GAAG,GAAG;AAC1C,oBAAY,GAAG,IAAI,IAAI,OAAO,QAAQ,GAAG,EAAE,MAAM,QAAQ,GAAG,EAAE,MAAM,QAAQ,GAAG,EAAE,IAAI;AAAA,MACzF;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EACA,aAAa,OAAO,MAAM,MAAM,MAAM,MAAM;AAExC,QAAI;AACJ,QAAI,UAAU,CAAA;AACd,QAAI,OAAO,SAAS,UAAU;AAC1B,6BAAuB;AACvB,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,kBAAU;AAAA,MACd,WACS,OAAO,SAAS,aAAa;AAClC,cAAM,IAAI,UAAU,8BAAgC;AAAA,MACxD;AAAA,IACJ,WACS,gBAAgB,YAAY;AACjC,6BAAuB;AACvB,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,kBAAU;AAAA,MACd,WACS,OAAO,SAAS,aAAa;AAClC,cAAM,IAAI,UAAU,8BAAgC;AAAA,MACxD;AAAA,IACJ,WACS,gBAAgB,eACpB,OAAO,sBAAsB,eAAe,gBAAgB,mBAAoB;AACjF,YAAM,SAAS;AACf,UAAI,aAAa;AACjB,UAAI,aAAa,KAAK;AACtB,UAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,kBAAU;AAAA,MACd,WACS,OAAO,SAAS,UAAU;AAC/B,qBAAa;AACb,YAAI,CAAC,OAAO,cAAc,UAAU,GAAG;AACnC,gBAAM,IAAI,WAAW,kCAAoC;AAAA,QAC7D;AACA,YAAI,aAAa,KAAK,cAAc,OAAO,YAAY;AACnD,gBAAM,IAAI,WAAW,oCAAoC,OAAO,UAAU,IAAI;AAAA,QAClF;AACA,qBAAa,KAAK,aAAa;AAC/B,YAAI,OAAO,SAAS,UAAU;AAC1B,uBAAa;AACb,cAAI,CAAC,OAAO,cAAc,UAAU,GAAG;AACnC,kBAAM,IAAI,WAAW,kCAAoC;AAAA,UAC7D;AACA,cAAI,cAAc,KAAK,aAAa,aAAa,OAAO,YAAY;AAChE,kBAAM,IAAI,WAAW,oCAAoC,OAAO,aAAa,UAAU,IAAI;AAAA,UAC/F;AACA,cAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC3C,sBAAU;AAAA,UACd,WACS,OAAO,SAAS,aAAa;AAClC,kBAAM,IAAI,UAAU,8BAAgC;AAAA,UACxD;AAAA,QACJ,WACS,OAAO,SAAS,aAAa;AAClC,gBAAM,IAAI,UAAU,gCAAkC;AAAA,QAC1D;AAAA,MACJ,WACS,OAAO,SAAS,aAAa;AAClC,cAAM,IAAI,U
AAU,8BAAgC;AAAA,MACxD;AACA,6BAAuB,IAAI,WAAW,QAAQ,YAAY,UAAU;AAAA,IACxE,OACK;AACD,YAAM,IAAI,UAAU,qDAAyD;AAAA,IACjF;AAEA,UAAM,MAAM,QAAQ,sBAAsB,CAAA;AAC1C,UAAM,eAAe,IAAI,IAAI,OAAK,OAAO,MAAM,WAAW,IAAI,EAAE,IAAI;AACpE,UAAM,UAAU,MAAM,eAAe,YAAY;AACjD,UAAM,UAAU,MAAM,QAAQ,qBAAqB,sBAAsB,OAAO;AAChF,WAAO,IAAI,iBAAiB,OAAO;AAAA,EACvC;AAAA,EACA,iBAAiB;AACb,SAAK,QAAQ,eAAc;AAAA,EAC/B;AAAA,EACA,eAAe;AACX,SAAK,QAAQ,aAAY;AAAA,EAC7B;AAAA,EACA,IAAI,aAAa;AACb,WAAO,KAAK,QAAQ;AAAA,EACxB;AAAA,EACA,IAAI,cAAc;AACd,WAAO,KAAK,QAAQ;AAAA,EACxB;AACJ;","x_google_ignoreList":[0]}
@@ -1,6 +0,0 @@
- import { InferenceSession as InferenceSession$1 } from "./inference-session-impl.js";
- const InferenceSession = InferenceSession$1;
- export {
- InferenceSession
- };
- //# sourceMappingURL=inference-session.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"inference-session.js","sources":["../../../../../node_modules/onnxruntime-common/dist/lib/inference-session.js"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\nimport { InferenceSession as InferenceSessionImpl } from './inference-session-impl';\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession = InferenceSessionImpl;\n//# sourceMappingURL=inference-session.js.map"],"names":["InferenceSessionImpl"],"mappings":";AAIY,MAAC,mBAAmBA;","x_google_ignoreList":[0]}