proto.io 0.0.228 → 0.0.229

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/README.md +1017 -0
  2. package/dist/adapters/file/aliyun-oss.d.mts +26 -0
  3. package/dist/adapters/file/aliyun-oss.d.mts.map +1 -0
  4. package/dist/adapters/file/aliyun-oss.d.ts +3 -3
  5. package/dist/adapters/file/database.d.mts +23 -0
  6. package/dist/adapters/file/database.d.mts.map +1 -0
  7. package/dist/adapters/file/database.d.ts +2 -2
  8. package/dist/adapters/file/database.js +1 -1
  9. package/dist/adapters/file/database.mjs +1 -1
  10. package/dist/adapters/file/filesystem.d.mts +25 -0
  11. package/dist/adapters/file/filesystem.d.mts.map +1 -0
  12. package/dist/adapters/file/filesystem.d.ts +3 -3
  13. package/dist/adapters/file/google-cloud-storage.d.mts +29 -0
  14. package/dist/adapters/file/google-cloud-storage.d.mts.map +1 -0
  15. package/dist/adapters/file/google-cloud-storage.d.ts +3 -3
  16. package/dist/adapters/storage/postgres.d.mts +299 -0
  17. package/dist/adapters/storage/postgres.d.mts.map +1 -0
  18. package/dist/adapters/storage/postgres.d.ts +5 -1
  19. package/dist/adapters/storage/postgres.d.ts.map +1 -1
  20. package/dist/adapters/storage/postgres.js +182 -74
  21. package/dist/adapters/storage/postgres.js.map +1 -1
  22. package/dist/adapters/storage/postgres.mjs +182 -74
  23. package/dist/adapters/storage/postgres.mjs.map +1 -1
  24. package/dist/client.d.mts +16 -0
  25. package/dist/client.d.mts.map +1 -0
  26. package/dist/client.d.ts +3 -3
  27. package/dist/client.js +1 -1
  28. package/dist/client.mjs +2 -2
  29. package/dist/index.d.mts +151 -0
  30. package/dist/index.d.mts.map +1 -0
  31. package/dist/index.d.ts +3 -3
  32. package/dist/index.js +50 -7
  33. package/dist/index.js.map +1 -1
  34. package/dist/index.mjs +51 -8
  35. package/dist/index.mjs.map +1 -1
  36. package/dist/internals/{base-CW4QHAo3.d.ts → base-Bhrj5Pq1.d.ts} +2 -2
  37. package/dist/internals/{base-CW4QHAo3.d.ts.map → base-Bhrj5Pq1.d.ts.map} +1 -1
  38. package/dist/internals/base-CiZHXD0o.d.mts +27 -0
  39. package/dist/internals/base-CiZHXD0o.d.mts.map +1 -0
  40. package/dist/internals/chunk-Cp2QN7ug.d.mts +17 -0
  41. package/dist/internals/chunk-Cp2QN7ug.d.mts.map +1 -0
  42. package/dist/internals/{chunk-DPgxK2_o.d.ts → chunk-o7lWIP-f.d.ts} +3 -3
  43. package/dist/internals/{chunk-DPgxK2_o.d.ts.map → chunk-o7lWIP-f.d.ts.map} +1 -1
  44. package/dist/internals/{index-vOFh8pVc.js → index-B0TO6h9r.js} +8 -1
  45. package/dist/internals/index-B0TO6h9r.js.map +1 -0
  46. package/dist/internals/{index-CywcwPk-.d.ts → index-B710pfTH.d.ts} +2 -2
  47. package/dist/internals/{index-CywcwPk-.d.ts.map → index-B710pfTH.d.ts.map} +1 -1
  48. package/dist/internals/{index-BWZIV3_T.mjs → index-DG2-4tQ1.mjs} +8 -1
  49. package/dist/internals/index-DG2-4tQ1.mjs.map +1 -0
  50. package/dist/internals/index-DwjvuRyl.d.mts +92 -0
  51. package/dist/internals/index-DwjvuRyl.d.mts.map +1 -0
  52. package/dist/internals/index-OwgXw07h.d.mts +2107 -0
  53. package/dist/internals/index-OwgXw07h.d.mts.map +1 -0
  54. package/dist/internals/{index-h4KGKuhq.d.ts → index-OwgXw07h.d.ts} +45 -3
  55. package/dist/internals/index-OwgXw07h.d.ts.map +1 -0
  56. package/dist/internals/{validator-Bc1jRJfA.js → validator-CFlx3oyq.js} +33 -1
  57. package/dist/internals/validator-CFlx3oyq.js.map +1 -0
  58. package/dist/internals/{validator-Boj1PUjM.mjs → validator-DubDY921.mjs} +32 -2
  59. package/dist/internals/validator-DubDY921.mjs.map +1 -0
  60. package/package.json +7 -19
  61. package/dist/internals/index-BWZIV3_T.mjs.map +0 -1
  62. package/dist/internals/index-h4KGKuhq.d.ts.map +0 -1
  63. package/dist/internals/index-vOFh8pVc.js.map +0 -1
  64. package/dist/internals/validator-Bc1jRJfA.js.map +0 -1
  65. package/dist/internals/validator-Boj1PUjM.mjs.map +0 -1
@@ -0,0 +1 @@
+ {"version":3,"file":"postgres.d.mts","sources":["../../../src/adapters/storage/postgres/driver/index.ts","../../../src/adapters/storage/sql/dialect.ts","../../../src/adapters/storage/sql/sql.ts","../../../src/adapters/storage/sql/compiler.ts","../../../src/adapters/storage/sql/storage.ts","../../../src/adapters/storage/postgres/client/base.ts","../../../src/adapters/storage/postgres/client/pool.ts"],"sourcesContent":["//\n// index.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { Pool, PoolConfig, PoolClient, types } from 'pg';\nimport QueryStream from 'pg-query-stream';\nimport { asyncStream, Awaitable, IteratorPool } from '@o2ter/utils-js';\nimport Decimal from 'decimal.js';\nimport { _decodeValue, _encodeValue } from '../../../../internals/object';\nimport { TValueWithoutObject } from '../../../../internals/types';\nimport { quote } from '../dialect/basic';\nimport { PROTO_EVENT } from '../../../../internals/const';\n\nconst typeParser = (oid: number, format?: any) => {\n format = format ?? 'text';\n if (format === 'text') {\n switch (oid) {\n case types.builtins.MONEY:\n return (value: string) => new Decimal(value);\n }\n }\n return types.getTypeParser(oid, format);\n};\n\nexport class PostgresClientDriver {\n\n db: Pool | PoolClient;\n\n constructor(db: Pool | PoolClient) {\n this.db = db;\n }\n\n query(text: string, values: any[] = [], batchSize?: number) {\n const db = this.db;\n return asyncStream(async function* () {\n const client = db instanceof Pool ? 
await db.connect() : db;\n const stream = new QueryStream(text, values, { batchSize });\n yield* IteratorPool(Number.MAX_SAFE_INTEGER, async function* () {\n client.query(stream);\n try {\n yield* stream;\n } finally {\n stream.destroy();\n if (db instanceof Pool) client.release();\n }\n });\n });\n }\n\n async version() {\n const [{ version }] = await this.query('SELECT version()');\n return version as string;\n }\n\n async databases() {\n return _.compact(_.map(\n await this.query('SELECT datname FROM pg_catalog.pg_database'),\n x => x.datname as string\n ));\n }\n\n async tables() {\n return _.compact(_.map(\n await this.query(`\n SELECT tablename FROM pg_catalog.pg_tables\n WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'\n `),\n x => x.tablename as string\n ));\n }\n\n async views() {\n return _.compact(_.map(\n await this.query(`\n SELECT viewname FROM pg_catalog.pg_views\n WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'\n `),\n x => x.viewname as string\n ));\n }\n\n async materializedViews() {\n return _.compact(_.map(\n await this.query(`\n SELECT matviewname FROM pg_catalog.pg_matviews\n WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'\n `),\n x => x.matviewname as string\n ));\n }\n\n async columns(table: string, namespace?: string) {\n const columns = await this.query(`\n SELECT\n a.attname AS column_name,\n format_type(a.atttypid, a.atttypmod) AS data_type,\n a.attnum ,\n a.attnotnull\n FROM\n pg_namespace n,\n pg_class t,\n pg_attribute a\n WHERE\n a.attnum > 0\n AND n.oid = t.relnamespace\n AND a.attrelid = t.oid\n AND NOT a.attisdropped\n AND t.relname = '${table}'\n ${namespace ? `AND n.nspname = '${namespace}'` : ''}\n `);\n return _.map(columns, ({ column_name, data_type, attnotnull }) => ({\n name: column_name as string,\n type: data_type as string,\n required: !!attnotnull,\n }));\n }\n\n async indices(table: string, namespace?: string) {\n const indices = await this.query(`\n SELECT\n n.nspname AS schema_name,\n t.relname AS table_name,\n i.relname AS index_name,\n ix.indisprimary AS is_primary,\n ix.indisunique AS is_unique,\n a.attname AS column_name,\n k.indseq AS seq\n FROM\n pg_namespace n,\n pg_class t,\n pg_class i,\n pg_index ix,\n UNNEST(ix.indkey) WITH ORDINALITY k(attnum, indseq),\n pg_attribute a\n WHERE\n t.oid = ix.indrelid\n AND n.oid = t.relnamespace\n AND i.oid = ix.indexrelid\n AND a.attrelid = t.oid\n AND a.attnum = k.attnum\n AND t.relkind = 'r'\n AND t.relname = '${table}'\n ${namespace ? `AND n.nspname = '${namespace}'` : ''}\n `);\n return _.mapValues(_.groupBy(indices, 'index_name'), v => ({\n keys: _.map(_.sortBy(v, ({ seq }) => parseInt(seq)), 'column_name'),\n ..._.pick(_.first(v), [\n 'schema_name',\n 'table_name',\n 'index_name',\n 'is_primary',\n 'is_unique',\n ])\n }));\n }\n\n async withClient<T>(callback: (client: PostgresClientDriver) => PromiseLike<T>) {\n const client = this.db instanceof Pool ? 
await this.db.connect() : this.db;\n try {\n return await callback(new PostgresClientDriver(client));\n } finally {\n if (this.db instanceof Pool) client.release();\n }\n }\n\n async publish(channel: string, payload: TValueWithoutObject) {\n await this.withClient(async (db) => {\n await db.query(`NOTIFY ${PROTO_EVENT}, ${quote(JSON.stringify(_encodeValue({ channel, payload })))}`);\n });\n }\n}\n\nclass PostgresPubSub {\n\n client: Awaitable<PoolClient>;\n subscribers: ((payload: TValueWithoutObject) => void)[] = [];\n\n constructor(client: Awaitable<PoolClient>) {\n this.client = client;\n (async () => {\n try {\n client = await client;\n client.on('notification', ({ channel, payload }) => {\n if (_.toUpper(channel) !== PROTO_EVENT || !payload) return;\n try {\n const _payload = _decodeValue(JSON.parse(payload));\n for (const subscriber of this.subscribers) {\n subscriber(_payload);\n }\n } catch (e) {\n console.error(`Unknown payload: ${e}`);\n }\n });\n await client.query(`LISTEN ${PROTO_EVENT}`);\n } catch (e) {\n console.error(e);\n }\n })();\n }\n\n async shutdown() {\n (await this.client).release();\n }\n\n subscribe(callback: (payload: TValueWithoutObject) => void) {\n this.subscribers.push(callback);\n return () => {\n this.subscribers = this.subscribers.filter(x => x !== callback);\n };\n }\n\n isEmpty() {\n return _.isEmpty(this.subscribers);\n }\n}\n\nexport class PostgresDriver extends PostgresClientDriver {\n\n database: Pool;\n\n private pubsub?: PostgresPubSub;\n\n constructor(config: string | PoolConfig) {\n if (_.isEmpty(config)) throw Error('Invalid postgre config.');\n const _types = { getTypeParser: typeParser as typeof types.getTypeParser };\n const database = new Pool(_.isString(config) ? { connectionString: config, types: _types } : { ...config, types: _types });\n super(database);\n this.database = database;\n }\n\n async shutdown() {\n await this._release_pubsub();\n await this.database.end();\n }\n\n private _init_pubsub() {\n if (this.pubsub || this.database.ending || this.database.ended) return;\n this.pubsub = new PostgresPubSub(this.database.connect());\n }\n\n private async _release_pubsub() {\n const pubsub = this.pubsub;\n this.pubsub = undefined;\n await pubsub?.shutdown();\n }\n\n subscribe(channel: string, callback: (payload: TValueWithoutObject) => void) {\n this._init_pubsub();\n if (!this.pubsub) return () => void 0;\n const release = this.pubsub.subscribe(({ channel: _channel, payload }: any) => {\n if (_channel === channel) callback(payload);\n });\n return () => {\n release();\n if (this.pubsub?.isEmpty()) this._release_pubsub();\n };\n }\n}\n","//\n// dialect.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. 
All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport { FieldSelectorExpression } from '../../../server/query/dispatcher/parser';\nimport { QueryExpression } from '../../../server/query/dispatcher/parser/expressions';\nimport { QueryAccumulator } from '../../../server/query/dispatcher/parser/accumulators';\nimport { TSchema } from '../../../internals/schema';\nimport { Populate, QueryCompiler, QueryContext } from './compiler';\nimport { SQL } from './sql';\nimport { TValue, TValueWithUndefined } from '../../../internals/types';\nimport { TUpdateOp } from '../../../internals/object/types';\nimport { RelationOptions } from '../../../server/storage';\n\nexport interface SqlDialect {\n quote(str: string): string;\n identifier(name: string): string;\n placeholder(idx: number): string;\n boolean(value: boolean): string;\n encodeType(colname: string, type: TSchema.DataType, value: TValueWithUndefined): SQL;\n decodeType(type: TSchema.Primitive | 'vector', value: any): TValue;\n updateOperation(paths: string[], dataType: TSchema.DataType, operation: TUpdateOp): SQL;\n\n selectPopulate(\n compiler: QueryCompiler,\n parent: QueryContext & { className: string; },\n populate: Populate,\n field: string,\n ): { columns: SQL[], join?: SQL }\n\n encodeFieldExpression(\n compiler: QueryCompiler,\n parent: QueryContext,\n field: string,\n expr: FieldSelectorExpression,\n ): SQL\n\n encodeSortExpression(\n compiler: QueryCompiler,\n parent: QueryContext,\n expr: QueryExpression,\n ): SQL | undefined\n\n encodeBooleanExpression(\n compiler: QueryCompiler,\n parent: QueryContext,\n expr: QueryExpression,\n ): SQL | undefined\n\n encodePopulate(\n compiler: QueryCompiler,\n parent: Populate,\n remix?: QueryContext & { className: string; }\n ): Record<string, SQL>\n\n encodeRelation(\n compiler: QueryCompiler,\n parent: QueryContext & { className: string; },\n relatedBy: NonNullable<RelationOptions['relatedBy']>\n ): SQL\n\n encodeSortKey(\n compiler: QueryCompiler,\n parent: QueryContext,\n key: string\n ): SQL\n\n encodeAccumulatorColumn(\n compiler: QueryCompiler,\n context: QueryContext,\n expr: QueryAccumulator,\n fetchName: string\n ): SQL\n\n random(weight?: SQL): SQL\n}\n","//\n// sql.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. 
All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { SqlDialect } from './dialect';\nimport { TValueWithUndefined } from '../../../internals/types';\nimport Decimal from 'decimal.js';\n\ntype SQLLiteral = SQL | SQL[] | { literal: string | SQL[], separator?: string };\ntype SQLIdentifier = { identifier: string };\ntype SQLEscapeString = { quote: string };\ntype SQLValue = { value: TValueWithUndefined } | SQLIdentifier | SQLLiteral | SQLEscapeString;\n\nconst isSQLArray = (v: any): v is SQL[] => _.isArray(v) && _.every(v, x => x instanceof SQL);\n\nexport class SQL {\n\n strings: TemplateStringsArray;\n values: SQLValue[];\n\n constructor(templates: TemplateStringsArray, values: SQLValue[]) {\n this.strings = templates;\n this.values = values;\n }\n\n private _compile(dialect: SqlDialect, nextIdx: () => number) {\n let [query, ...strings] = this.strings;\n const values: any[] = [];\n for (const [value, str] of _.zip(this.values, strings)) {\n if (_.isNil(value)) break;\n if (value instanceof SQL) {\n const { query: _query, values: _values } = value._compile(dialect, nextIdx);\n query += _query;\n values.push(..._values);\n } else if (isSQLArray(value)) {\n const queries: string[] = [];\n for (const subquery of value) {\n const { query: _query, values: _values } = subquery._compile(dialect, nextIdx);\n queries.push(_query);\n values.push(..._values);\n }\n query += queries.join(', ');\n } else if ('quote' in value) {\n query += dialect.quote(value.quote);\n } else if ('identifier' in value) {\n query += dialect.identifier(value.identifier);\n } else if ('literal' in value) {\n if (_.isString(value.literal)) {\n query += value.literal;\n } else {\n const queries: string[] = [];\n for (const subquery of value.literal) {\n const { query: _query, values: _values } = subquery._compile(dialect, nextIdx);\n queries.push(_query);\n values.push(..._values);\n }\n query += queries.join(value.separator ?? 
', ');\n }\n } else if (_.isBoolean(value.value)) {\n query += dialect.boolean(value.value);\n } else if (_.isString(value.value)) {\n query += `${dialect.placeholder(nextIdx())}::TEXT`;\n values.push(value.value);\n } else if (_.isSafeInteger(value.value)) {\n query += `${value.value}`;\n } else if (_.isNumber(value.value)) {\n query += `${dialect.placeholder(nextIdx())}::DOUBLE PRECISION`;\n values.push(value.value);\n } else if (value.value instanceof Decimal) {\n query += `${dialect.placeholder(nextIdx())}::DECIMAL`;\n values.push(value.value.toString());\n } else {\n query += dialect.placeholder(nextIdx());\n values.push(value.value);\n }\n query += str;\n }\n return { query: _.compact(query.split('\\n').filter(x => !x.match(/^\\s+$/g))).join('\\n'), values };\n }\n\n compile(dialect: SqlDialect) {\n let idx = 1;\n return this._compile(dialect, () => idx++);\n }\n}\n\nexport const sql = (templates: TemplateStringsArray, ...values: SQLValue[]) => new SQL(templates, values);\n","//\n// compiler.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { TSchema, isPointer, isPrimitive, isRelation, isShape, shapePaths } from '../../../internals/schema';\nimport { QueryCoditionalSelector, QueryExpressionSelector, QueryFieldSelector, QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { DecodedBaseQuery, DecodedQuery, FindOptions, InsertOptions, DecodedSortOption, RelationOptions } from '../../../server/storage';\nimport { SQL, sql } from './sql';\nimport { generateId } from '../../../server/crypto/random';\nimport { SqlDialect } from './dialect';\nimport { resolveColumn, resolveDataType } from '../../../server/query/dispatcher/validator';\nimport { decodeUpdateOp } from '../../../internals/object';\nimport { TUpdateOp } from '../../../internals/object/types';\nimport { TValueWithUndefined } from '../../../internals/types';\nimport { QueryAccumulator } from '../../../server/query/dispatcher/parser/accumulators';\n\nexport type QueryCompilerOptions = {\n className: string;\n filter?: QuerySelector;\n sort?: Record<string, 1 | -1> | DecodedSortOption[];\n includes: string[];\n matches: Record<string, DecodedBaseQuery>;\n}\n\nexport type QueryContext = {\n name: string;\n className?: string;\n includes?: Record<string, TSchema.DataType>;\n populates?: Record<string, Populate>;\n groupMatches?: Record<string, Record<string, QueryAccumulator>>;\n}\n\nexport type Populate = Required<QueryContext> & {\n colname: string;\n type: 'pointer' | 'relation';\n foreignField?: string;\n subpaths: string[];\n filter?: QuerySelector;\n sort?: Record<string, 1 | -1> | DecodedSortOption[];\n skip?: number;\n limit?: number;\n}\n\nconst _resolveSortingName = (\n key: string,\n includes: Record<string, TSchema.DataType>,\n populates: Record<string, Populate>,\n) => {\n let resolved: string | undefined;\n let resolvedField = false;\n for (const colname of _.toPath(key)) {\n const name = resolved ? `${resolved}.${colname}` : colname;\n if (resolvedField || includes[name]) {\n resolved = name;\n resolvedField = true;\n } else if (populates[name]) {\n resolved = populates[name].name;\n includes = populates[name].includes;\n populates = populates[name].populates;\n } else if (_.some(_.keys(includes), x => _.startsWith(x, `${name}.`))) {\n resolved = name;\n } else {\n throw Error(`Invalid path: ${key}`);\n }\n }\n return resolved;\n}\n\nconst _encodeSorting = (\n includes: Record<string, TSchema.DataType>,\n populates: Record<string, Populate>,\n sort?: Record<string, 1 | -1> | DecodedSortOption[],\n) => {\n if (_.isArray(sort)) {\n return _.map(sort, x => ({\n order: x.order,\n expr: x.expr.mapKey(key => {\n const resolved = _resolveSortingName(key, includes, populates);\n if (!resolved) throw Error(`Invalid path: ${key}`);\n return resolved;\n }),\n }));\n }\n const sorting: Record<string, 1 | -1> = {};\n for (const [key, order] of _.toPairs(sort)) {\n const resolved = _resolveSortingName(key, includes, populates);\n if (!resolved) throw Error(`Invalid path: ${key}`);\n sorting[resolved] = order;\n }\n return sorting;\n}\n\nconst _defaultInsertOpts = (options: InsertOptions) => {\n const id = generateId(options.objectIdSize);\n return {\n _id: sql`${{ value: id }}`,\n ...options.className === 'User' ? 
{\n _rperm: sql`${{ value: [id] }}`,\n _wperm: sql`${{ value: [id] }}`,\n } : {},\n };\n}\n\ntype _SelectOptions = {\n select?: SQL,\n sort?: SQL,\n extraFilter?: SQL,\n};\n\nexport class QueryCompiler {\n\n schema: Record<string, TSchema>;\n dialect: SqlDialect;\n selectLock: boolean;\n isUpdate: boolean;\n\n extraFilter?: (className: string) => QuerySelector;\n\n idx = 0;\n\n constructor(options: {\n schema: Record<string, TSchema>;\n dialect: SqlDialect;\n selectLock: boolean;\n isUpdate: boolean;\n extraFilter?: (className: string) => QuerySelector;\n }) {\n this.schema = options.schema;\n this.dialect = options.dialect;\n this.selectLock = options.selectLock;\n this.isUpdate = options.isUpdate;\n this.extraFilter = options.extraFilter;\n }\n\n nextIdx() {\n return this.idx++;\n }\n\n private _encodeIncludes(query: {\n className: string;\n includes: string[];\n matches: Record<string, DecodedBaseQuery>;\n groupMatches: Record<string, Record<string, QueryAccumulator>>;\n }) {\n\n const names: Record<string, TSchema.DataType> = {};\n const populates: Record<string, Populate> = {};\n const groupMatches: Record<string, Record<string, QueryAccumulator>> = {};\n\n for (const include of query.includes) {\n const { paths: [colname, ...subpath], dataType } = resolveColumn(this.schema, query.className, include);\n\n names[colname] = dataType;\n\n if (isRelation(dataType) && !_.isNil(query.groupMatches[colname])) groupMatches[colname] = query.groupMatches[colname];\n\n if (isPointer(dataType) || isRelation(dataType)) {\n if (_.isEmpty(subpath)) throw Error(`Invalid path: ${include}`);\n const _matches = query.matches[colname] ?? {};\n populates[colname] = populates[colname] ?? {\n name: `t${this.nextIdx()}`,\n className: dataType.target,\n subpaths: [],\n filter: _matches.filter,\n skip: _matches.skip,\n limit: _matches.limit,\n type: dataType.type,\n colname,\n };\n if (isRelation(dataType) && dataType.foreignField) {\n const targetType = resolveDataType(this.schema, dataType.target, dataType.foreignField);\n if (_.isNil(targetType)) throw Error(`Invalid path: ${include}`);\n if (!isPointer(targetType) && !isRelation(targetType)) throw Error(`Invalid path: ${include}`);\n populates[colname].foreignField = dataType.foreignField;\n }\n populates[colname].subpaths.push(subpath.join('.'));\n } else if (!_.isEmpty(subpath)) {\n throw Error(`Invalid path: ${include}`);\n }\n }\n\n for (const [colname, populate] of _.toPairs(populates)) {\n const _matches = query.matches[colname] ?? {};\n const { includes, populates, groupMatches } = this._encodeIncludes({\n className: populate.className,\n includes: populate.subpaths,\n matches: _matches.matches,\n groupMatches: {\n ..._.mapKeys(_.pickBy(query.groupMatches, (x, k) => _.startsWith(k, `${colname}.`)), (x, k) => k.slice(colname.length + 1)),\n ..._matches.groupMatches ?? 
{},\n },\n });\n populate.sort = _encodeSorting(includes, populates, _matches.sort);\n populate.includes = includes;\n populate.populates = populates;\n populate.groupMatches = groupMatches;\n }\n\n return {\n className: query.className,\n includes: names,\n populates,\n groupMatches,\n };\n }\n\n private _baseSelectQuery(\n query: DecodedQuery<FindOptions & RelationOptions>,\n options?: _SelectOptions | ((x: { fetchName: string; }) => _SelectOptions),\n ) {\n\n const fetchName = `_fetch_$${query.className.toLowerCase()}`;\n const context = { ...this._encodeIncludes(query), name: fetchName };\n\n const _stages = _.mapValues(context.populates, (populate) => this.dialect.encodePopulate(this, populate));\n const stages = _.fromPairs(_.flatMap(_.values(_stages), (p) => _.toPairs(p)));\n\n const baseFilter = this._encodeFilter(context, query.filter);\n const populates = this._selectPopulateMap(context);\n const joins = _.compact(_.map(populates, ({ join }) => join));\n\n const includes = {\n literal: [\n ...this._selectIncludes(fetchName, context.includes),\n ..._.flatMap(populates, ({ columns }) => columns),\n ],\n separator: ',\\n',\n };\n\n const _options = _.isFunction(options) ? options({ fetchName }) : options;\n const filter = _.compact([\n baseFilter,\n _options?.extraFilter,\n query.relatedBy && this.dialect.encodeRelation(this, context, query.relatedBy),\n ]);\n\n return {\n stages,\n fetchName,\n context,\n query: sql`\n SELECT ${_options?.select ? _options?.select : sql`*`} FROM (\n SELECT ${includes}\n FROM ${{ identifier: query.className }} AS ${{ identifier: fetchName }}\n ${!_.isEmpty(joins) ? { literal: joins, separator: '\\n' } : sql``}\n ${this.selectLock ? this.isUpdate ? sql`FOR UPDATE NOWAIT` : sql`FOR SHARE NOWAIT` : sql``}\n ) AS ${{ identifier: fetchName }}\n ${!_.isEmpty(filter) ? sql`WHERE ${{ literal: _.map(filter, x => sql`(${x})`), separator: ' AND ' }}` : sql``}\n ${_options?.sort ? _options?.sort : sql``}\n ${!_.isEmpty(query.sort) ? sql`ORDER BY ${this._encodeSort(query.sort, {\n name: fetchName,\n className: query.className,\n groupMatches: query.groupMatches,\n })}` : sql``}\n ${query.limit ? sql`LIMIT ${{ literal: `${query.limit}` }}` : sql``}\n ${query.skip ? sql`OFFSET ${{ literal: `${query.skip}` }}` : sql``}\n `,\n };\n }\n\n private _refetch(\n name: string,\n query: DecodedQuery<FindOptions>,\n ) {\n\n const _context = { ...this._encodeIncludes(query), name };\n const populates = _.mapValues(\n _context.populates, (populate) => this.dialect.encodePopulate(this, populate, { className: query.className, name })\n );\n const stages = _.fromPairs(_.flatMap(_.values(populates), (p) => _.toPairs(p)));\n\n const _populates = this._selectPopulateMap(_context);\n const _joins = _.compact(_.map(_populates, ({ join }) => join));\n\n const _includes = {\n literal: [\n ...this._selectIncludes(name, _context.includes),\n ..._.flatMap(_populates, ({ columns }) => columns),\n ], separator: ',\\n'\n };\n\n return sql`\n ${!_.isEmpty(stages) ? sql`, ${_.map(stages, (q, n) => sql`${{ identifier: n }} AS (${q})`)}` : sql``}\n SELECT ${_includes}\n FROM ${{ identifier: name }}\n ${!_.isEmpty(_joins) ? { literal: _joins, separator: '\\n' } : sql``}\n `;\n }\n\n _selectQuery(\n query: DecodedQuery<FindOptions & RelationOptions>,\n options?: _SelectOptions | ((x: { fetchName: string; }) => _SelectOptions),\n ) {\n const { stages, query: _query } = this._baseSelectQuery(query, options);\n return sql`\n ${!_.isEmpty(stages) ? 
sql`WITH ${_.map(stages, (q, n) => sql`${{ identifier: n }} AS (${q})`)}` : sql``}\n ${_query}\n `;\n }\n\n private _modifyQuery(\n query: DecodedQuery<FindOptions>,\n action: (fetchName: string, context: QueryContext & { className: string; }) => SQL\n ) {\n const { stages, fetchName, query: _query, context } = this._baseSelectQuery(query);\n stages[fetchName] = _query;\n return sql`\n ${!_.isEmpty(stages) ? sql`WITH ${_.map(stages, (q, n) => sql`${{ identifier: n }} AS (${q})`)}` : sql``}\n ${action(fetchName, context)}\n `;\n }\n\n private _encodeUpdateAttrs(className: string, attrs: Record<string, TUpdateOp>): SQL[] {\n const updates: SQL[] = [\n sql`__v = __v + 1`,\n sql`_updated_at = NOW()`,\n ];\n for (const [path, op] of _.toPairs(attrs)) {\n const { paths: [column, ...subpath], dataType } = resolveColumn(this.schema, className, path);\n if (isShape(dataType)) {\n const [_op, value] = decodeUpdateOp(op);\n if (_op !== '$set') throw Error('Invalid update operation');\n for (const { path, type } of shapePaths(dataType)) {\n if (!isRelation(type) || _.isNil(type.foreignField)) {\n updates.push(sql`${{ identifier: `${column}.${path}` }} = ${this.dialect.updateOperation(\n [`${column}.${path}`], type, { $set: _.get(value, path) ?? null }\n )}`);\n }\n }\n } else {\n updates.push(sql`${{ identifier: column }} = ${this.dialect.updateOperation(\n [column, ...subpath], dataType, op\n )}`);\n }\n }\n return updates;\n }\n\n private _encodeObjectAttrs(className: string, attrs: Record<string, TValueWithUndefined>): Record<string, SQL> {\n const result: Record<string, SQL> = {};\n for (const [key, value] of _.toPairs(attrs)) {\n const { paths: [column, ...subpath], dataType } = resolveColumn(this.schema, className, key);\n if (!_.isEmpty(subpath)) throw Error(`Invalid insert key: ${key}`);\n if (isShape(dataType)) {\n for (const { path, type } of shapePaths(dataType)) {\n if (!isRelation(type) || _.isNil(type.foreignField)) {\n result[`${column}.${path}`] = this.dialect.encodeType(`${column}.${path}`, type, _.get(value, path) ?? 
null);\n }\n }\n } else {\n result[column] = this.dialect.encodeType(column, dataType, value);\n }\n }\n return result;\n }\n\n private _encodeCoditionalSelector(\n parent: QueryContext,\n filter: QueryCoditionalSelector,\n ) {\n const queries = _.compact(_.map(filter.exprs, x => this._encodeFilter(parent, x)));\n if (_.isEmpty(queries)) return;\n switch (filter.type) {\n case '$and': return sql`(${{ literal: _.map(queries, x => sql`(${x})`), separator: ' AND ' }})`;\n case '$nor': return sql`(${{ literal: _.map(queries, x => sql`NOT (${x})`), separator: ' AND ' }})`;\n case '$or': return sql`(${{ literal: _.map(queries, x => sql`(${x})`), separator: ' OR ' }})`;\n }\n }\n\n _encodeFilter(\n parent: QueryContext,\n filter: QuerySelector,\n ): SQL | undefined {\n if (filter instanceof QueryCoditionalSelector) {\n return this._encodeCoditionalSelector(parent, filter);\n }\n if (filter instanceof QueryFieldSelector) {\n return this.dialect.encodeFieldExpression(this, parent, filter.field, filter.expr);\n }\n if (filter instanceof QueryExpressionSelector) {\n return this.dialect.encodeBooleanExpression(this, parent, filter.expr);\n }\n }\n\n _selectIncludes(\n className: string,\n includes: Record<string, TSchema.DataType>,\n ): SQL[] {\n const _includes = _.pickBy(includes, v => _.isString(v) || (v.type !== 'pointer' && v.type !== 'relation'));\n return _.flatMap(_includes, (dataType, colname) => {\n if (!_.isString(dataType) && isPrimitive(dataType) && !_.isNil(dataType.default)) {\n return sql`COALESCE(${{ identifier: className }}.${{ identifier: colname }}, ${{ value: dataType.default }}) AS ${{ identifier: colname }}`;\n }\n return sql`${{ identifier: className }}.${{ identifier: colname }}`;\n });\n }\n\n _encodeSort(\n sort: Record<string, 1 | -1> | DecodedSortOption[],\n parent: QueryContext,\n ): SQL {\n if (_.isArray(sort)) {\n return sql`${_.map(sort, ({ expr, order }) => {\n const _expr = this.dialect.encodeSortExpression(this, parent, expr);\n if (!_expr) throw Error('Invalid expression');\n return sql`${_expr} ${{ literal: order === 1 ? 'ASC' : 'DESC' }}`;\n })}`;\n }\n return sql`${_.map(sort, (order, key) => sql`\n ${this.dialect.encodeSortKey(this, parent, key)} ${{ literal: order === 1 ? 
'ASC' : 'DESC' }}\n `)}`;\n }\n\n private _selectPopulateMap(\n context: QueryContext & { className: string; },\n ) {\n return _.map(context.populates, (populate, field) => this.dialect.selectPopulate(\n this,\n context,\n populate,\n field,\n ));\n }\n\n groupFind(\n query: DecodedQuery<FindOptions & RelationOptions>,\n accumulators: Record<string, QueryAccumulator>\n ) {\n return this._modifyQuery(\n query,\n (fetchName, context) => {\n const columns = _.map(accumulators, (expr, field) => {\n const aggSQL = this.dialect.encodeAccumulatorColumn(\n this,\n { ...context, name: fetchName, className: query.className },\n expr,\n fetchName\n );\n return sql`${aggSQL} AS ${{ identifier: field }}`;\n });\n return sql`SELECT ${{ literal: columns, separator: ', ' }} FROM ${{ identifier: fetchName }}`;\n }\n );\n }\n\n insert(options: InsertOptions, values: Record<string, TValueWithUndefined>[]) {\n\n const _values: Record<string, SQL>[] = _.map(values, attr => ({\n ..._defaultInsertOpts(options),\n ...this._encodeObjectAttrs(options.className, attr),\n }));\n\n const keys = _.uniq(_.flatMap(_values, x => _.keys(x)));\n\n const name = `_insert_$${options.className.toLowerCase()}`;\n\n const context = { ...this._encodeIncludes(options), name };\n const populates = _.mapValues(context.populates, (populate) => this.dialect.encodePopulate(this, populate));\n const stages = _.fromPairs(_.flatMap(_.values(populates), (p) => _.toPairs(p)));\n\n const _populates = this._selectPopulateMap(context);\n const joins = _.compact(_.map(_populates, ({ join }) => join));\n\n return sql`\n WITH ${{ identifier: name }} AS (\n INSERT INTO ${{ identifier: options.className }}\n (${_.map(keys, x => sql`${{ identifier: x }}`)})\n VALUES ${_.map(_values, v => sql`(${_.map(keys, k => sql`${v[k]}`)})`)}\n RETURNING *\n )${!_.isEmpty(stages) ? sql`, ${_.map(stages, (q, n) => sql`${{ identifier: n }} AS (${q})`)}` : sql``}\n SELECT ${{\n literal: [\n ...this._selectIncludes(name, context.includes),\n ..._.flatMap(_populates, ({ columns }) => columns),\n ], separator: ',\\n'\n }}\n FROM ${{ identifier: name }}\n ${!_.isEmpty(joins) ? 
{ literal: joins, separator: '\\n' } : sql``}\n `;\n }\n\n update(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>) {\n return this._modifyQuery(\n query,\n (fetchName) => {\n const name = `_update_$${query.className.toLowerCase()}`;\n return sql`\n , ${{ identifier: name }} AS (\n UPDATE ${{ identifier: query.className }}\n SET ${this._encodeUpdateAttrs(query.className, update)}\n WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})\n RETURNING *\n )\n ${this._refetch(name, query)}\n `;\n }\n );\n }\n\n upsert(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>, setOnInsert: Record<string, TValueWithUndefined>) {\n\n const _insert: [string, SQL][] = _.toPairs({\n ..._defaultInsertOpts(query),\n ...this._encodeObjectAttrs(query.className, setOnInsert),\n });\n\n return this._modifyQuery(\n query,\n (fetchName) => {\n const updateName = `_update_$${query.className.toLowerCase()}`;\n const insertName = `_insert_$${query.className.toLowerCase()}`;\n const upsertName = `_upsert_$${query.className.toLowerCase()}`;\n return sql`\n , ${{ identifier: updateName }} AS (\n UPDATE ${{ identifier: query.className }}\n SET ${this._encodeUpdateAttrs(query.className, update)}\n WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})\n RETURNING *\n )\n , ${{ identifier: insertName }} AS (\n INSERT INTO ${{ identifier: query.className }}\n (${_.map(_insert, x => sql`${{ identifier: x[0] }}`)})\n SELECT ${_.map(_insert, x => sql`${x[1]} AS ${{ identifier: x[0] }}`)}\n WHERE NOT EXISTS(SELECT * FROM ${{ identifier: updateName }})\n RETURNING *\n )\n , ${{ identifier: upsertName }} AS (\n SELECT * FROM ${{ identifier: updateName }}\n UNION\n SELECT * FROM ${{ identifier: insertName }}\n )\n ${this._refetch(upsertName, query)}\n `;\n }\n );\n }\n\n delete(query: DecodedQuery<FindOptions>) {\n\n return this._modifyQuery(\n query,\n (fetchName, context) => {\n const name = `_delete_$${query.className.toLowerCase()}`;\n const populates = this._selectPopulateMap({ ...context, name });\n const joins = _.compact(_.map(populates, ({ join }) => join));\n return sql`\n , ${{ identifier: name }} AS (\n DELETE FROM ${{ identifier: query.className }}\n WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})\n RETURNING *\n )\n SELECT ${{\n literal: [\n ...this._selectIncludes(name, context.includes ?? {}),\n ..._.flatMap(populates, ({ columns }) => columns),\n ], separator: ',\\n'\n }}\n FROM ${{ identifier: name }}\n ${!_.isEmpty(joins) ? { literal: joins, separator: '\\n' } : sql``}\n `;\n }\n );\n }\n\n}","//\n// storage.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. 
All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { DecodedQuery, FindOptions, InsertOptions, TStorage, RelationOptions, DecodedBaseQuery, QueryRandomOptions } from '../../../server/storage';\nimport { TransactionOptions } from '../../../internals/proto';\nimport { TSchema, isPointer, isRelation, isShape, shapePaths, isPrimitive } from '../../../internals/schema';\nimport { SQL, sql } from './sql';\nimport { SqlDialect } from './dialect';\nimport { QueryCompiler } from './compiler';\nimport { asyncStream } from '@o2ter/utils-js';\nimport { TValueWithoutObject, TValueWithUndefined } from '../../../internals/types';\nimport { TObject } from '../../../internals/object';\nimport { PVK } from '../../../internals/private';\nimport { TUpdateOp } from '../../../internals/object/types';\nimport { QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { QueryAccumulator } from '../../../server/query/dispatcher/parser/accumulators';\nimport { resolveDataType } from '../../../server/query/dispatcher/validator';\nimport { encodeTypedQueryExpression } from '../postgres/dialect/query/expr';\n\nexport abstract class SqlStorage implements TStorage {\n\n schema: Record<string, TSchema> = {};\n\n async prepare(schema: Record<string, TSchema>) {\n this.schema = schema;\n }\n\n async shutdown() { }\n\n classes() {\n return Object.keys(this.schema);\n }\n\n abstract selectLock(): boolean;\n\n abstract config(acl?: string[]): PromiseLike<Record<string, TValueWithoutObject>>;\n abstract configAcl(): PromiseLike<Record<string, string[]>>;\n abstract setConfig(values: Record<string, TValueWithoutObject>, acl?: string[]): PromiseLike<void>;\n abstract lockTable(className: string | string[], update: boolean): Promise<void>;\n abstract withConnection<T>(callback: (connection: TStorage) => PromiseLike<T>): PromiseLike<T>;\n abstract isDuplicateIdError(error: any): boolean;\n abstract atomic<T>(\n callback: (connection: TStorage) => PromiseLike<T>,\n options?: { lockTable?: string; retry?: boolean; },\n ): PromiseLike<T>;\n abstract withTransaction<T>(\n callback: (connection: TStorage) => PromiseLike<T>,\n options?: TransactionOptions,\n ): PromiseLike<T>;\n\n abstract get dialect(): SqlDialect;\n protected abstract _query(text: string, values: any[]): ReturnType<typeof asyncStream<any>>;\n\n abstract _refs(schema: Record<string, TSchema>, className: string, keys: string[], item: SQL): SQL;\n\n query(sql: SQL) {\n 
const { query, values } = sql.compile(this.dialect);\n return this._query(query, values);\n }\n\n abstract _explain(compiler: QueryCompiler, query: DecodedQuery<FindOptions & RelationOptions>): PromiseLike<any>\n\n private _decodeMatchTypes(value: any, matchType: Record<string, any>): any {\n if (!_.isPlainObject(value)) return;\n return _.mapValues(value, (v, k) => this.dialect.decodeType(matchType[k], v));\n }\n\n private _decodeShapedObject(dataType: TSchema.ShapeType, value: any, matchesType: Record<string, any>) {\n const result = {};\n for (const { path, type } of shapePaths(dataType)) {\n const matchType = _.get(matchesType, path) ?? {};\n if (_.isString(type)) {\n const _value = this.dialect.decodeType(type, _.get(value, path));\n if (!_.isNil(_value)) _.set(result, path, _value);\n } else if (isPointer(type)) {\n const _value = _.get(value, path);\n if (_.isPlainObject(_value)) {\n const decoded = this._decodeObject(type.target, _value, matchType);\n if (decoded.id) _.set(result, path, decoded);\n }\n } else if (isRelation(type)) {\n const _value = _.get(value, path);\n if (_.isArray(_value)) _.set(result, path, _value.map(x => this._decodeObject(type.target, x, matchType)));\n else if (_.isPlainObject(_value)) _.set(result, path, this._decodeMatchTypes(_value, matchType));\n } else {\n const _value = this.dialect.decodeType(type.type, _.get(value, path)) ?? type.default;\n if (!_.isNil(_value)) _.set(result, path, _value);\n }\n }\n return result;\n }\n\n protected _decodeObject(className: string, attrs: Record<string, any>, matchesType: Record<string, any>): TObject {\n const fields = this.schema[className].fields;\n const obj = new TObject(className);\n const _attrs: Record<string, any> = {};\n for (const [key, value] of _.toPairs(attrs)) {\n _.set(_attrs, key, value);\n }\n for (const [key, value] of _.toPairs(_attrs)) {\n const matchType = matchesType[key] ?? {};\n const dataType = fields[key];\n if (!dataType) continue;\n if (_.isString(dataType)) {\n obj[PVK].attributes[key] = this.dialect.decodeType(dataType, value);\n } else if (isShape(dataType)) {\n obj[PVK].attributes[key] = this._decodeShapedObject(dataType, value, matchType);\n } else if (isPointer(dataType)) {\n if (_.isPlainObject(value)) {\n const decoded = this._decodeObject(dataType.target, value, matchType);\n if (decoded.id) obj[PVK].attributes[key] = decoded;\n }\n } else if (isRelation(dataType)) {\n if (_.isArray(value)) obj[PVK].attributes[key] = value.map(x => this._decodeObject(dataType.target, x, matchType));\n else if (_.isPlainObject(value)) obj[PVK].attributes[key] = this._decodeMatchTypes(value, matchType);\n } else {\n obj[PVK].attributes[key] = this.dialect.decodeType(dataType.type, value) ?? dataType.default as any;\n }\n }\n return obj;\n }\n\n private _makeCompiler(\n isUpdate: boolean,\n extraFilter?: (className: string) => QuerySelector,\n ) {\n return new QueryCompiler({\n schema: this.schema,\n dialect: this.dialect,\n selectLock: this.selectLock(),\n isUpdate,\n extraFilter,\n });\n }\n\n async explain(query: DecodedQuery<FindOptions & RelationOptions>) {\n const compiler = this._makeCompiler(false, query.extraFilter);\n return this._explain(compiler, query);\n }\n\n async count(query: DecodedQuery<FindOptions & RelationOptions>) {\n const compiler = this._makeCompiler(false, query.extraFilter);\n const [{ count: _count }] = await this.query(compiler._selectQuery(query, {\n select: sql`COUNT(*) AS count`,\n }));\n const count = parseInt(_count);\n return _.isFinite(count) ? 
count : 0;\n }\n\n private _matchesType(compiler: QueryCompiler, options: {\n className: string;\n matches: Record<string, DecodedBaseQuery>;\n groupMatches?: Record<string, Record<string, QueryAccumulator>>;\n }): Record<string, any> {\n const types: Record<string, any> = {};\n for (const [key, match] of _.entries(options.matches)) {\n const type = resolveDataType(compiler.schema, options.className, key);\n if (_.isNil(type)) continue;\n if (isPointer(type) || isRelation(type)) {\n types[key] = this._matchesType(compiler, { className: type.target, ...match });\n }\n }\n for (const [key, group] of _.entries(options.groupMatches)) {\n for (const [field, expr] of _.entries(group)) {\n _.set(types, `${key}.${field}`, expr.evalType(compiler.schema, options.className));\n }\n }\n return types;\n }\n\n find(query: DecodedQuery<FindOptions & RelationOptions>) {\n const self = this;\n const compiler = self._makeCompiler(false, query.extraFilter);\n const _matchesType = self._matchesType(compiler, query);\n const _query = compiler._selectQuery(query);\n return (async function* () {\n const objects = self.query(_query);\n for await (const object of objects) {\n yield self._decodeObject(query.className, object, _matchesType);\n }\n })();\n }\n\n random(query: DecodedQuery<FindOptions & RelationOptions>, opts?: QueryRandomOptions) {\n const self = this;\n const compiler = self._makeCompiler(false, query.extraFilter);\n const _matchesType = self._matchesType(compiler, query);\n const _query = compiler._selectQuery({ ...query, sort: {} }, ({ fetchName }) => {\n if (!opts?.weight) return { sort: sql`ORDER BY ${self.dialect.random()}` };\n const weight = encodeTypedQueryExpression(compiler, {\n name: fetchName,\n className: query.className,\n groupMatches: query.groupMatches,\n }, opts.weight);\n if (!weight) throw Error('Invalid expression');\n return { sort: sql`ORDER BY ${self.dialect.random(weight.sql)}` };\n });\n return (async function* () {\n const objects = self.query(_query);\n for await (const object of objects) {\n yield self._decodeObject(query.className, object, _matchesType);\n }\n })();\n }\n\n async groupFind(\n query: DecodedQuery<FindOptions & RelationOptions>,\n accumulators: Record<string, QueryAccumulator>\n ) {\n const compiler = this._makeCompiler(true, query.extraFilter);\n const [result] = await this.query(compiler.groupFind(query, accumulators));\n if (!result) return result;\n // Decode accumulator results based on their types\n const decoded: Record<string, any> = {};\n for (const [key, accumulator] of _.toPairs(accumulators)) {\n const evalType = accumulator.evalType(this.schema, query.className);\n if (evalType === 'array') {\n // For $group operators, result is already parsed JSONB array\n decoded[key] = result[key];\n } else if (evalType && isPrimitive(evalType)) {\n // Extract primitive type from PrimitiveType\n const primitiveType = _.isString(evalType) ? evalType : evalType.type;\n decoded[key] = this.dialect.decodeType(primitiveType, result[key]);\n } else {\n decoded[key] = result[key];\n }\n }\n return decoded;\n }\n\n refs(object: TObject, classNames: string[], roles?: string[]) {\n const self = this;\n const query = sql`\n SELECT *\n FROM (${this._refs(\n _.pick(this.schema, classNames), object.className, TObject.defaultKeys,\n sql`${{ value: `${object.className}$${object.id}` }}`,\n )}) AS \"$\"\n ${_.isNil(roles) ? 
sql`` : sql`WHERE ${{ identifier: '$' }}.${{ identifier: '_rperm' }} && ${{ value: roles }}`}\n `;\n return (async function* () {\n const objects = self.query(query);\n for await (const { _class, ...object } of objects) {\n yield self._decodeObject(_class, object, {});\n }\n })();\n }\n\n nonrefs(query: DecodedQuery<FindOptions>) {\n const self = this;\n const compiler = self._makeCompiler(false, query.extraFilter);\n const _matchesType = self._matchesType(compiler, query);\n const _query = compiler._selectQuery(query, ({ fetchName }) => ({\n extraFilter: sql`\n NOT EXISTS (${this._refs(\n this.schema, query.className, ['_id'],\n sql`(${{ quote: query.className + '$' }} || ${{ identifier: fetchName }}.${{ identifier: '_id' }})`,\n )})\n `\n }));\n return (async function* () {\n const objects = self.query(_query);\n for await (const object of objects) {\n yield self._decodeObject(query.className, object, _matchesType);\n }\n })();\n }\n\n async insert(options: InsertOptions, values: Record<string, TValueWithUndefined>[]) {\n const compiler = this._makeCompiler(true);\n const _matchesType = this._matchesType(compiler, options);\n const result = await this.query(compiler.insert(options, values));\n return _.map(result, x => this._decodeObject(options.className, x, _matchesType));\n }\n\n async update(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>) {\n const compiler = this._makeCompiler(true, query.extraFilter);\n const _matchesType = this._matchesType(compiler, query);\n const updated = await this.query(compiler.update(query, update));\n return _.map(updated, x => this._decodeObject(query.className, x, _matchesType));\n }\n\n async upsert(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>, setOnInsert: Record<string, TValueWithUndefined>) {\n const compiler = this._makeCompiler(true, query.extraFilter);\n const _matchesType = this._matchesType(compiler, query);\n const upserted = await this.query(compiler.upsert(query, update, setOnInsert));\n return _.map(upserted, x => this._decodeObject(query.className, x, _matchesType));\n }\n\n async delete(query: DecodedQuery<FindOptions>) {\n const compiler = this._makeCompiler(true, query.extraFilter);\n const _matchesType = this._matchesType(compiler, query);\n const deleted = await this.query(compiler.delete(query));\n return _.map(deleted, x => this._decodeObject(query.className, x, _matchesType));\n }\n}\n","//\n// base.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2025 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
[Regenerated source map for the compiled postgres storage adapter (postgres.js.map / postgres.mjs.map): the embedded `sourcesContent` carries the revised `base.ts` (PostgresStorageClient with the `_Config` read/write helpers, pubsub `subscribe`/`publish` delegation, `lockTable`, and an isolation-aware `withTransaction` that retries SQLSTATE 40001/40P01/55P03 and nests inner work on savepoints through PostgresStorageTransaction) and the revised `pool.ts` (PostgresStorage: extension checks, `_Config` table creation, `_pgType` column mapping, column rebuilds, and pgvector/GIN index management), followed by the regenerated `mappings` string.]
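The `base.ts` module embedded in the map above handles transactions by mapping the requested mode to `BEGIN ISOLATION LEVEL ...`, committing or rolling back around the callback, and retrying when Postgres reports a retryable SQLSTATE (40001 serialization failure, 40P01 deadlock, 55P03 lock not available). Below is a minimal sketch of that retry pattern written directly against node-postgres; the helper name, signature, and retry count are illustrative and are not the package's API.

```ts
// Sketch only: the commit/rollback-and-retry pattern described above, reduced
// to plain node-postgres. Not the package's exported API.
import { Pool, PoolClient } from 'pg';

// SQLSTATEs worth retrying: serialization_failure, deadlock_detected, lock_not_available.
const RETRYABLE = ['40001', '40P01', '55P03'];

const BEGIN: Record<string, string> = {
  committed: 'BEGIN ISOLATION LEVEL READ COMMITTED',
  repeatable: 'BEGIN ISOLATION LEVEL REPEATABLE READ',
  serializable: 'BEGIN ISOLATION LEVEL SERIALIZABLE',
};

async function withTransaction<T>(
  pool: Pool,
  fn: (client: PoolClient) => Promise<T>,
  mode: 'committed' | 'repeatable' | 'serializable' = 'committed',
  retries = 3,
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    const client = await pool.connect();
    try {
      await client.query(BEGIN[mode]);
      const result = await fn(client);
      await client.query('COMMIT');
      return result;
    } catch (e: any) {
      await client.query('ROLLBACK');
      // Retry only on transient conflicts; rethrow everything else.
      if (attempt < retries && RETRYABLE.includes(e.code)) continue;
      throw e;
    } finally {
      client.release(); // runs before the loop continues or the error propagates
    }
  }
}
```

Nested calls in the embedded module take the same shape but issue SAVEPOINT / ROLLBACK TO SAVEPOINT instead of BEGIN / ROLLBACK, which is what lets an inner failure be contained without aborting the outer transaction.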
@@ -1,5 +1,5 @@
  import { Pool, PoolClient, PoolConfig } from 'pg';
- import { c as TValueWithoutObject, T as TSchema, q as TValueWithUndefined, r as TValue, t as TUpdateOp, F as FieldSelectorExpression, Q as QueryExpression, R as RelationOptions, u as QuerySelector, v as DecodedQuery, w as FindOptions, x as DecodedSortOption, y as QueryAccumulator, I as InsertOptions, z as TStorage, A as TransactionOptions, k as TObject, B as QueryRandomOptions, C as TPubSub } from '../../internals/index-h4KGKuhq.js';
+ import { c as TValueWithoutObject, T as TSchema, q as TValueWithUndefined, r as TValue, t as TUpdateOp, F as FieldSelectorExpression, Q as QueryExpression, R as RelationOptions, u as QueryAccumulator, v as QuerySelector, w as DecodedQuery, x as FindOptions, y as DecodedSortOption, I as InsertOptions, z as TStorage, A as TransactionOptions, k as TObject, B as QueryRandomOptions, C as TPubSub } from '../../internals/index-OwgXw07h.js';
  import * as _o2ter_utils_js from '@o2ter/utils-js';
  import { asyncStream } from '@o2ter/utils-js';
  import 'jsonwebtoken';
@@ -70,6 +70,7 @@ interface SqlDialect {
      className: string;
  }, relatedBy: NonNullable<RelationOptions['relatedBy']>): SQL;
  encodeSortKey(compiler: QueryCompiler, parent: QueryContext, key: string): SQL;
+ encodeAccumulatorColumn(compiler: QueryCompiler, context: QueryContext, expr: QueryAccumulator, fetchName: string): SQL;
  random(weight?: SQL): SQL;
  }
 
@@ -148,6 +149,7 @@ declare class QueryCompiler {
  _selectIncludes(className: string, includes: Record<string, TSchema.DataType>): SQL[];
  _encodeSort(sort: Record<string, 1 | -1> | DecodedSortOption[], parent: QueryContext): SQL;
  private _selectPopulateMap;
+ groupFind(query: DecodedQuery<FindOptions & RelationOptions>, accumulators: Record<string, QueryAccumulator>): SQL;
  insert(options: InsertOptions, values: Record<string, TValueWithUndefined>[]): SQL;
  update(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>): SQL;
  upsert(query: DecodedQuery<FindOptions>, update: Record<string, TUpdateOp>, setOnInsert: Record<string, TValueWithUndefined>): SQL;
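The two hunks above add an aggregation path to the SQL layer: the dialect gains `encodeAccumulatorColumn` and the compiler gains `groupFind`, which returns a single SQL statement built from accumulators keyed by output column. This diff does not show the shape of `QueryAccumulator`, so the sketch below only illustrates how such a record could be folded over an already-compiled base query; every type and helper name is hypothetical, and plain strings stand in for the package's SQL builder.

```ts
// Hypothetical stand-ins: the real QueryAccumulator and SQL types are not
// visible in this diff, and plain strings replace the package's SQL builder.
type Accumulator =
  | { type: 'count' }
  | { type: 'sum' | 'avg' | 'min' | 'max'; field: string };

// One aggregate expression per requested output column, aliased to the
// column name (the role the new fetchName parameter appears to play).
const encodeAccumulatorColumn = (expr: Accumulator, fetchName: string): string => {
  const body = expr.type === 'count'
    ? 'COUNT(*)'
    : `${expr.type.toUpperCase()}("${expr.field}")`;
  return `${body} AS "${fetchName}"`;
};

// Wrap an already-compiled SELECT and project the requested aggregates.
const compileGroupFind = (
  baseQuery: string,
  accumulators: Record<string, Accumulator>,
): string => {
  const columns = Object.entries(accumulators)
    .map(([name, expr]) => encodeAccumulatorColumn(expr, name));
  return `SELECT ${columns.join(', ')} FROM (${baseQuery}) AS "base"`;
};

// Example: two aggregates over a filtered class in a single round trip.
const statement = compileGroupFind(
  'SELECT * FROM "Order" WHERE _expired_at IS NULL',
  { count: { type: 'count' }, total: { type: 'sum', field: 'price' } },
);
// SELECT COUNT(*) AS "count", SUM("price") AS "total" FROM (SELECT ...) AS "base"
```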
@@ -185,6 +187,7 @@ declare abstract class SqlStorage implements TStorage {
  private _matchesType;
  find(query: DecodedQuery<FindOptions & RelationOptions>): AsyncGenerator<TObject, void, unknown>;
  random(query: DecodedQuery<FindOptions & RelationOptions>, opts?: QueryRandomOptions): AsyncGenerator<TObject, void, unknown>;
+ groupFind(query: DecodedQuery<FindOptions & RelationOptions>, accumulators: Record<string, QueryAccumulator>): Promise<any>;
  refs(object: TObject, classNames: string[], roles?: string[]): AsyncGenerator<TObject, void, unknown>;
  nonrefs(query: DecodedQuery<FindOptions>): AsyncGenerator<TObject, void, unknown>;
  insert(options: InsertOptions, values: Record<string, TValueWithUndefined>[]): Promise<TObject[]>;
@@ -220,6 +223,7 @@ declare class PostgresStorageClient<Driver extends PostgresClientDriver> extends
      className: string;
  }, relatedBy: NonNullable<RelationOptions["relatedBy"]>) => SQL;
  encodeSortKey: (compiler: QueryCompiler, parent: QueryContext, key: string) => SQL;
+ encodeAccumulatorColumn: (compiler: QueryCompiler, context: QueryContext, expr: QueryAccumulator, fetchName: string) => SQL;
  random: (weight?: SQL) => SQL;
  };
  selectLock(): boolean;
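Beyond the new dialect hook, the trailing context (`selectLock()`) points at the locking side of the client: the embedded `base.ts` takes explicit table locks with `LOCK ... IN EXCLUSIVE|SHARE MODE NOWAIT` before atomic work, so a contended table fails fast with SQLSTATE 55P03 and the enclosing transaction can be retried by the pattern sketched earlier. A reduced version of that statement against node-postgres, with illustrative names only:

```ts
import { PoolClient } from 'pg';

// Sketch only: take an explicit table lock inside an open transaction.
// NOWAIT makes Postgres fail immediately with SQLSTATE 55P03 instead of
// queueing, so the caller can roll back and retry the whole transaction.
// Helper name and quoting are illustrative, not the package's API.
async function lockTables(
  client: PoolClient,
  tables: string | string[],
  forUpdate: boolean,
): Promise<void> {
  const list = (Array.isArray(tables) ? tables : [tables])
    .map(t => `"${t.replace(/"/g, '""')}"`)
    .join(', ');
  const mode = forUpdate ? 'EXCLUSIVE' : 'SHARE';
  await client.query(`LOCK TABLE ${list} IN ${mode} MODE NOWAIT`);
}
```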