proto.io 0.0.174 → 0.0.176
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/adapters/file/database.d.ts +2 -2
- package/dist/adapters/file/database.js +1 -0
- package/dist/adapters/file/database.js.map +1 -1
- package/dist/adapters/file/database.mjs +3 -2
- package/dist/adapters/file/database.mjs.map +1 -1
- package/dist/adapters/file/filesystem.d.ts +2 -2
- package/dist/adapters/file/google-cloud-storage.d.ts +2 -2
- package/dist/adapters/storage/progres.d.ts +3 -2
- package/dist/adapters/storage/progres.js +51 -29
- package/dist/adapters/storage/progres.js.map +1 -1
- package/dist/adapters/storage/progres.mjs +53 -31
- package/dist/adapters/storage/progres.mjs.map +1 -1
- package/dist/client.d.ts +3 -3
- package/dist/client.js +1 -1
- package/dist/client.mjs +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +58 -314
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +62 -318
- package/dist/index.mjs.map +1 -1
- package/dist/internals/{index-CGX3qcjQ.d.ts → index-B0DE6MVr.d.ts} +2 -2
- package/dist/internals/index-B0DE6MVr.d.ts.map +1 -0
- package/dist/internals/{index-CE5tdYK8.d.ts → index-BZcW5wgM.d.ts} +32 -68
- package/dist/internals/index-BZcW5wgM.d.ts.map +1 -0
- package/dist/internals/{index-uwXdnxqN.d.ts → index-Bh60s4Tq.d.ts} +32 -2
- package/dist/internals/index-Bh60s4Tq.d.ts.map +1 -0
- package/dist/internals/{index--ifyu-GL.mjs → index-CTsc042s.mjs} +2 -2
- package/dist/internals/{index--ifyu-GL.mjs.map → index-CTsc042s.mjs.map} +1 -1
- package/dist/internals/{index-S_gTMQBh.js → index-Cd6nNb--.js} +31 -43
- package/dist/internals/index-Cd6nNb--.js.map +1 -0
- package/dist/internals/{index-C3fbOqmn.mjs → index-DHQhYGsJ.mjs} +2 -2
- package/dist/internals/{index-C3fbOqmn.mjs.map → index-DHQhYGsJ.mjs.map} +1 -1
- package/dist/internals/{index-R0gbIGc-.mjs → index-tEsKpX2d.mjs} +32 -44
- package/dist/internals/index-tEsKpX2d.mjs.map +1 -0
- package/dist/internals/{random-DVOUDDGg.js → random-CvVZek_8.js} +13 -1
- package/dist/internals/random-CvVZek_8.js.map +1 -0
- package/dist/internals/{random-DPRG8oW6.mjs → random-DGiu1IIq.mjs} +15 -3
- package/dist/internals/random-DGiu1IIq.mjs.map +1 -0
- package/package.json +1 -1
- package/dist/internals/index-CE5tdYK8.d.ts.map +0 -1
- package/dist/internals/index-CGX3qcjQ.d.ts.map +0 -1
- package/dist/internals/index-R0gbIGc-.mjs.map +0 -1
- package/dist/internals/index-S_gTMQBh.js.map +0 -1
- package/dist/internals/index-uwXdnxqN.d.ts.map +0 -1
- package/dist/internals/random-DPRG8oW6.mjs.map +0 -1
- package/dist/internals/random-DVOUDDGg.js.map +0 -1

@@ -1,5 +1,5 @@
-import { T as TSchema, P as ProtoService } from '../../internals/index-
-import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-
+import { T as TSchema, P as ProtoService } from '../../internals/index-BZcW5wgM.js';
+import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-B0DE6MVr.js';
 import '@o2ter/utils-js';
 import 'jsonwebtoken';
 import '@o2ter/server-js';

@@ -1 +1 @@
-
{"version":3,"file":"database.js","sources":["../../../../src/adapters/file/database/index.ts"],"sourcesContent":["//\n// index.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2024 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { ProtoService } from '../../../server/proto';\nimport { TSchema } from '../../../internals/schema';\nimport FileStorageBase, { FileStorageOptions } from '../base';\nimport { QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { base64ToBuffer, bufferToBase64 } from '@o2ter/utils-js';\n\nexport class DatabaseFileStorage extends FileStorageBase {\n\n constructor(options: FileStorageOptions = {}) {\n super(options);\n }\n\n get schema(): Record<string, TSchema> {\n return {\n '_FileChunk': {\n fields: {\n token: 'string',\n start: 'number',\n end: 'number',\n size: 'number',\n base64: 'string',\n },\n classLevelPermissions: {\n get: [],\n find: [],\n count: [],\n create: [],\n update: [],\n delete: [],\n },\n fieldLevelPermissions: {\n _expired_at: { create: [], update: [] },\n },\n indexes: [\n { keys: { token: 1, start: 1, end: 1 } },\n { keys: { token: 1, end: 1 } },\n ]\n },\n }\n }\n\n async createChunk<E>(proto: ProtoService<E>, token: string, start: number, end: number, compressed: Buffer) {\n\n const created = await proto.Query('_FileChunk').insert({\n token,\n start: start,\n end: end,\n size: end - start,\n base64: bufferToBase64(compressed),\n }, { master: true });\n\n if (!created) throw Error('Unable to save file');\n\n }\n\n async* readChunks<E>(proto: ProtoService<E>, token: string, start?: number | undefined, end?: number | undefined) {\n const query = proto.Query('_FileChunk')\n .sort({ start: 1 })\n .filter({\n token: { $eq: token },\n ...start ? { end: { $gt: start } } : {},\n ...end ? 
{ start: { $lt: end } } : {},\n });\n for await (const chunk of query.find({ master: true })) {\n const startBytes = chunk.get('start');\n const base64 = chunk.get('base64');\n if (!_.isNumber(startBytes) || !_.isString(base64)) throw Error('Corrupted data');\n yield {\n start: startBytes,\n data: base64ToBuffer(base64),\n };\n }\n }\n\n async destroy<E>(proto: ProtoService<E>, token: string) {\n proto.storage.deleteMany({\n className: '_FileChunk',\n filter: QuerySelector.decode({ token: { $eq: token } }),\n includes: ['_id'],\n matches: {},\n objectIdSize: 0\n });\n }\n};\n\nexport default DatabaseFileStorage;"],"names":["FileStorageBase","bufferToBase64","base64ToBuffer","QuerySelector"],"mappings":";;;;;;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASM,MAAO,mBAAoB,SAAQA,qBAAe,CAAA;AAEtD,IAAA,WAAA,CAAY,UAA8B,EAAE,EAAA;QAC1C,KAAK,CAAC,OAAO,CAAC;;AAGhB,IAAA,IAAI,MAAM,GAAA;QACR,OAAO;AACL,YAAA,YAAY,EAAE;AACZ,gBAAA,MAAM,EAAE;AACN,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,GAAG,EAAE,QAAQ;AACb,oBAAA,IAAI,EAAE,QAAQ;AACd,oBAAA,MAAM,EAAE,QAAQ;AACjB,iBAAA;AACD,gBAAA,qBAAqB,EAAE;AACrB,oBAAA,GAAG,EAAE,EAAE;AACP,oBAAA,IAAI,EAAE,EAAE;AACR,oBAAA,KAAK,EAAE,EAAE;AACT,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACX,iBAAA;AACD,gBAAA,qBAAqB,EAAE;oBACrB,WAAW,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE;AACxC,iBAAA;AACD,gBAAA,OAAO,EAAE;AACP,oBAAA,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;oBACxC,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;AAC/B;AACF,aAAA;SACF;;IAGH,MAAM,WAAW,CAAI,KAAsB,EAAE,KAAa,EAAE,KAAa,EAAE,GAAW,EAAE,UAAkB,EAAA;QAExG,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC;YACrD,KAAK;AACL,YAAA,KAAK,EAAE,KAAK;AACZ,YAAA,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,GAAG,GAAG,KAAK;AACjB,YAAA,MAAM,EAAEC,sBAAc,CAAC,UAAU,CAAC;AACnC,SAAA,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAEpB,QAAA,IAAI,CAAC,OAAO;AAAE,YAAA,MAAM,KAAK,CAAC,qBAAqB,CAAC;;IAIlD,OAAO,UAAU,CAAI,KAAsB,EAAE,KAAa,EAAE,KAA0B,EAAE,GAAwB,EAAA;AAC9G,QAAA,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,YAAY;AACnC,aAAA,IAAI,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;AACjB,aAAA,MAAM,CAAC;AACN,YAAA,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE;AACrB,YAAA,GAAG,KAAK,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE;AACvC,YAAA,GAAG,GAAG,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE;AACtC,SAAA,CAAC;AACJ,QAAA,WAAW,MAAM,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE;YACtD,MAAM,UAAU,GAAG,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;YACrC,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC;AAAE,gBAAA,MAAM,KAAK,CAAC,gBAAgB,CAAC;YACjF,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU;AACjB,gBAAA,IAAI,EAAEC,sBAAc,CAAC,MAAM,CAAC;aAC7B;;;AAIL,IAAA,MAAM,OAAO,CAAI,KAAsB,EAAE,KAAa,EAAA;AACpD,QAAA,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC;AACvB,YAAA,SAAS,EAAE,YAAY;AACvB,YAAA,MAAM,EAAEC,qBAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC;YACvD,QAAQ,EAAE,CAAC,KAAK,CAAC;AACjB,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,YAAY,EAAE;AACf,SAAA,CAAC;;AAEL;;;;;"}
+
{"version":3,"file":"database.js","sources":["../../../../src/adapters/file/database/index.ts"],"sourcesContent":["//\n// index.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2024 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { ProtoService } from '../../../server/proto';\nimport { TSchema } from '../../../internals/schema';\nimport FileStorageBase, { FileStorageOptions } from '../base';\nimport { QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { base64ToBuffer, bufferToBase64 } from '@o2ter/utils-js';\n\nexport class DatabaseFileStorage extends FileStorageBase {\n\n constructor(options: FileStorageOptions = {}) {\n super(options);\n }\n\n get schema(): Record<string, TSchema> {\n return {\n '_FileChunk': {\n fields: {\n token: 'string',\n start: 'number',\n end: 'number',\n size: 'number',\n base64: 'string',\n },\n classLevelPermissions: {\n get: [],\n find: [],\n count: [],\n create: [],\n update: [],\n delete: [],\n },\n fieldLevelPermissions: {\n _expired_at: { create: [], update: [] },\n },\n indexes: [\n { keys: { token: 1, start: 1, end: 1 } },\n { keys: { token: 1, end: 1 } },\n ]\n },\n }\n }\n\n async createChunk<E>(proto: ProtoService<E>, token: string, start: number, end: number, compressed: Buffer) {\n\n const created = await proto.Query('_FileChunk').insert({\n token,\n start: start,\n end: end,\n size: end - start,\n base64: bufferToBase64(compressed),\n }, { master: true });\n\n if (!created) throw Error('Unable to save file');\n\n }\n\n async* readChunks<E>(proto: ProtoService<E>, token: string, start?: number | undefined, end?: number | undefined) {\n const query = proto.Query('_FileChunk')\n .sort({ start: 1 })\n .filter({\n token: { $eq: token },\n ...start ? { end: { $gt: start } } : {},\n ...end ? 
{ start: { $lt: end } } : {},\n });\n for await (const chunk of query.find({ master: true })) {\n const startBytes = chunk.get('start');\n const base64 = chunk.get('base64');\n if (!_.isNumber(startBytes) || !_.isString(base64)) throw Error('Corrupted data');\n yield {\n start: startBytes,\n data: base64ToBuffer(base64),\n };\n }\n }\n\n async destroy<E>(proto: ProtoService<E>, token: string) {\n proto.storage.deleteMany({\n className: '_FileChunk',\n filter: QuerySelector.decode({ token: { $eq: token } }),\n includes: ['_id'],\n matches: {},\n countMatches: [],\n objectIdSize: 0\n });\n }\n};\n\nexport default DatabaseFileStorage;"],"names":["FileStorageBase","bufferToBase64","base64ToBuffer","QuerySelector"],"mappings":";;;;;;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASM,MAAO,mBAAoB,SAAQA,qBAAe,CAAA;AAEtD,IAAA,WAAA,CAAY,UAA8B,EAAE,EAAA;QAC1C,KAAK,CAAC,OAAO,CAAC;;AAGhB,IAAA,IAAI,MAAM,GAAA;QACR,OAAO;AACL,YAAA,YAAY,EAAE;AACZ,gBAAA,MAAM,EAAE;AACN,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,GAAG,EAAE,QAAQ;AACb,oBAAA,IAAI,EAAE,QAAQ;AACd,oBAAA,MAAM,EAAE,QAAQ;AACjB,iBAAA;AACD,gBAAA,qBAAqB,EAAE;AACrB,oBAAA,GAAG,EAAE,EAAE;AACP,oBAAA,IAAI,EAAE,EAAE;AACR,oBAAA,KAAK,EAAE,EAAE;AACT,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACX,iBAAA;AACD,gBAAA,qBAAqB,EAAE;oBACrB,WAAW,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE;AACxC,iBAAA;AACD,gBAAA,OAAO,EAAE;AACP,oBAAA,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;oBACxC,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;AAC/B;AACF,aAAA;SACF;;IAGH,MAAM,WAAW,CAAI,KAAsB,EAAE,KAAa,EAAE,KAAa,EAAE,GAAW,EAAE,UAAkB,EAAA;QAExG,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC;YACrD,KAAK;AACL,YAAA,KAAK,EAAE,KAAK;AACZ,YAAA,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,GAAG,GAAG,KAAK;AACjB,YAAA,MAAM,EAAEC,sBAAc,CAAC,UAAU,CAAC;AACnC,SAAA,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAEpB,QAAA,IAAI,CAAC,OAAO;AAAE,YAAA,MAAM,KAAK,CAAC,qBAAqB,CAAC;;IAIlD,OAAO,UAAU,CAAI,KAAsB,EAAE,KAAa,EAAE,KAA0B,EAAE,GAAwB,EAAA;AAC9G,QAAA,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,YAAY;AACnC,aAAA,IAAI,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;AACjB,aAAA,MAAM,CAAC;AACN,YAAA,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE;AACrB,YAAA,GAAG,KAAK,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE;AACvC,YAAA,GAAG,GAAG,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE;AACtC,SAAA,CAAC;AACJ,QAAA,WAAW,MAAM,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE;YACtD,MAAM,UAAU,GAAG,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;YACrC,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC;AAAE,gBAAA,MAAM,KAAK,CAAC,gBAAgB,CAAC;YACjF,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU;AACjB,gBAAA,IAAI,EAAEC,sBAAc,CAAC,MAAM,CAAC;aAC7B;;;AAIL,IAAA,MAAM,OAAO,CAAI,KAAsB,EAAE,KAAa,EAAA;AACpD,QAAA,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC;AACvB,YAAA,SAAS,EAAE,YAAY;AACvB,YAAA,MAAM,EAAEC,qBAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC;YACvD,QAAQ,EAAE,CAAC,KAAK,CAAC;AACjB,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,YAAY,EAAE,EAAE;AAChB,YAAA,YAAY,EAAE;AACf,SAAA,CAAC;;AAEL;;;;;"}

@@ -1,11 +1,11 @@
 import _ from 'lodash';
 import { F as FileStorageBase } from '../../internals/index-be1VYBY2.mjs';
-import { Q as QuerySelector } from '../../internals/index-
+import { Q as QuerySelector } from '../../internals/index-DHQhYGsJ.mjs';
 import { bufferToBase64, base64ToBuffer } from '@o2ter/utils-js';
 import 'util';
 import 'zlib';
 import '../../internals/private-BUpLAMZi.mjs';
-import '../../internals/index
+import '../../internals/index-CTsc042s.mjs';
 import 'decimal.js';
 
 //
@@ -100,6 +100,7 @@ class DatabaseFileStorage extends FileStorageBase {
 filter: QuerySelector.decode({ token: { $eq: token } }),
 includes: ['_id'],
 matches: {},
+countMatches: [],
 objectIdSize: 0
 });
 }
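For reference, the TypeScript source embedded in the updated source map (sourcesContent) shows the same change at the source level: destroy now passes an explicit countMatches list to proto.storage.deleteMany. Quoted below from the map, with a comment added on the new field:

async destroy<E>(proto: ProtoService<E>, token: string) {
  proto.storage.deleteMany({
    className: '_FileChunk',
    filter: QuerySelector.decode({ token: { $eq: token } }),
    includes: ['_id'],
    matches: {},
    countMatches: [],  // new option in this release; empty here, so behaviour is unchanged
    objectIdSize: 0
  });
}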

@@ -1 +1 @@
-
{"version":3,"file":"database.mjs","sources":["../../../../src/adapters/file/database/index.ts"],"sourcesContent":["//\n// index.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2024 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { ProtoService } from '../../../server/proto';\nimport { TSchema } from '../../../internals/schema';\nimport FileStorageBase, { FileStorageOptions } from '../base';\nimport { QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { base64ToBuffer, bufferToBase64 } from '@o2ter/utils-js';\n\nexport class DatabaseFileStorage extends FileStorageBase {\n\n constructor(options: FileStorageOptions = {}) {\n super(options);\n }\n\n get schema(): Record<string, TSchema> {\n return {\n '_FileChunk': {\n fields: {\n token: 'string',\n start: 'number',\n end: 'number',\n size: 'number',\n base64: 'string',\n },\n classLevelPermissions: {\n get: [],\n find: [],\n count: [],\n create: [],\n update: [],\n delete: [],\n },\n fieldLevelPermissions: {\n _expired_at: { create: [], update: [] },\n },\n indexes: [\n { keys: { token: 1, start: 1, end: 1 } },\n { keys: { token: 1, end: 1 } },\n ]\n },\n }\n }\n\n async createChunk<E>(proto: ProtoService<E>, token: string, start: number, end: number, compressed: Buffer) {\n\n const created = await proto.Query('_FileChunk').insert({\n token,\n start: start,\n end: end,\n size: end - start,\n base64: bufferToBase64(compressed),\n }, { master: true });\n\n if (!created) throw Error('Unable to save file');\n\n }\n\n async* readChunks<E>(proto: ProtoService<E>, token: string, start?: number | undefined, end?: number | undefined) {\n const query = proto.Query('_FileChunk')\n .sort({ start: 1 })\n .filter({\n token: { $eq: token },\n ...start ? { end: { $gt: start } } : {},\n ...end ? 
{ start: { $lt: end } } : {},\n });\n for await (const chunk of query.find({ master: true })) {\n const startBytes = chunk.get('start');\n const base64 = chunk.get('base64');\n if (!_.isNumber(startBytes) || !_.isString(base64)) throw Error('Corrupted data');\n yield {\n start: startBytes,\n data: base64ToBuffer(base64),\n };\n }\n }\n\n async destroy<E>(proto: ProtoService<E>, token: string) {\n proto.storage.deleteMany({\n className: '_FileChunk',\n filter: QuerySelector.decode({ token: { $eq: token } }),\n includes: ['_id'],\n matches: {},\n objectIdSize: 0\n });\n }\n};\n\nexport default DatabaseFileStorage;"],"names":[],"mappings":";;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASM,MAAO,mBAAoB,SAAQ,eAAe,CAAA;AAEtD,IAAA,WAAA,CAAY,UAA8B,EAAE,EAAA;QAC1C,KAAK,CAAC,OAAO,CAAC;;AAGhB,IAAA,IAAI,MAAM,GAAA;QACR,OAAO;AACL,YAAA,YAAY,EAAE;AACZ,gBAAA,MAAM,EAAE;AACN,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,GAAG,EAAE,QAAQ;AACb,oBAAA,IAAI,EAAE,QAAQ;AACd,oBAAA,MAAM,EAAE,QAAQ;AACjB,iBAAA;AACD,gBAAA,qBAAqB,EAAE;AACrB,oBAAA,GAAG,EAAE,EAAE;AACP,oBAAA,IAAI,EAAE,EAAE;AACR,oBAAA,KAAK,EAAE,EAAE;AACT,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACX,iBAAA;AACD,gBAAA,qBAAqB,EAAE;oBACrB,WAAW,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE;AACxC,iBAAA;AACD,gBAAA,OAAO,EAAE;AACP,oBAAA,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;oBACxC,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;AAC/B;AACF,aAAA;SACF;;IAGH,MAAM,WAAW,CAAI,KAAsB,EAAE,KAAa,EAAE,KAAa,EAAE,GAAW,EAAE,UAAkB,EAAA;QAExG,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC;YACrD,KAAK;AACL,YAAA,KAAK,EAAE,KAAK;AACZ,YAAA,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,GAAG,GAAG,KAAK;AACjB,YAAA,MAAM,EAAE,cAAc,CAAC,UAAU,CAAC;AACnC,SAAA,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAEpB,QAAA,IAAI,CAAC,OAAO;AAAE,YAAA,MAAM,KAAK,CAAC,qBAAqB,CAAC;;IAIlD,OAAO,UAAU,CAAI,KAAsB,EAAE,KAAa,EAAE,KAA0B,EAAE,GAAwB,EAAA;AAC9G,QAAA,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,YAAY;AACnC,aAAA,IAAI,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;AACjB,aAAA,MAAM,CAAC;AACN,YAAA,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE;AACrB,YAAA,GAAG,KAAK,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE;AACvC,YAAA,GAAG,GAAG,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE;AACtC,SAAA,CAAC;AACJ,QAAA,WAAW,MAAM,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE;YACtD,MAAM,UAAU,GAAG,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;YACrC,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC;AAAE,gBAAA,MAAM,KAAK,CAAC,gBAAgB,CAAC;YACjF,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU;AACjB,gBAAA,IAAI,EAAE,cAAc,CAAC,MAAM,CAAC;aAC7B;;;AAIL,IAAA,MAAM,OAAO,CAAI,KAAsB,EAAE,KAAa,EAAA;AACpD,QAAA,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC;AACvB,YAAA,SAAS,EAAE,YAAY;AACvB,YAAA,MAAM,EAAE,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC;YACvD,QAAQ,EAAE,CAAC,KAAK,CAAC;AACjB,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,YAAY,EAAE;AACf,SAAA,CAAC;;AAEL;;;;"}
+
{"version":3,"file":"database.mjs","sources":["../../../../src/adapters/file/database/index.ts"],"sourcesContent":["//\n// index.ts\n//\n// The MIT License\n// Copyright (c) 2021 - 2024 O2ter Limited. All rights reserved.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n//\n\nimport _ from 'lodash';\nimport { ProtoService } from '../../../server/proto';\nimport { TSchema } from '../../../internals/schema';\nimport FileStorageBase, { FileStorageOptions } from '../base';\nimport { QuerySelector } from '../../../server/query/dispatcher/parser';\nimport { base64ToBuffer, bufferToBase64 } from '@o2ter/utils-js';\n\nexport class DatabaseFileStorage extends FileStorageBase {\n\n constructor(options: FileStorageOptions = {}) {\n super(options);\n }\n\n get schema(): Record<string, TSchema> {\n return {\n '_FileChunk': {\n fields: {\n token: 'string',\n start: 'number',\n end: 'number',\n size: 'number',\n base64: 'string',\n },\n classLevelPermissions: {\n get: [],\n find: [],\n count: [],\n create: [],\n update: [],\n delete: [],\n },\n fieldLevelPermissions: {\n _expired_at: { create: [], update: [] },\n },\n indexes: [\n { keys: { token: 1, start: 1, end: 1 } },\n { keys: { token: 1, end: 1 } },\n ]\n },\n }\n }\n\n async createChunk<E>(proto: ProtoService<E>, token: string, start: number, end: number, compressed: Buffer) {\n\n const created = await proto.Query('_FileChunk').insert({\n token,\n start: start,\n end: end,\n size: end - start,\n base64: bufferToBase64(compressed),\n }, { master: true });\n\n if (!created) throw Error('Unable to save file');\n\n }\n\n async* readChunks<E>(proto: ProtoService<E>, token: string, start?: number | undefined, end?: number | undefined) {\n const query = proto.Query('_FileChunk')\n .sort({ start: 1 })\n .filter({\n token: { $eq: token },\n ...start ? { end: { $gt: start } } : {},\n ...end ? 
{ start: { $lt: end } } : {},\n });\n for await (const chunk of query.find({ master: true })) {\n const startBytes = chunk.get('start');\n const base64 = chunk.get('base64');\n if (!_.isNumber(startBytes) || !_.isString(base64)) throw Error('Corrupted data');\n yield {\n start: startBytes,\n data: base64ToBuffer(base64),\n };\n }\n }\n\n async destroy<E>(proto: ProtoService<E>, token: string) {\n proto.storage.deleteMany({\n className: '_FileChunk',\n filter: QuerySelector.decode({ token: { $eq: token } }),\n includes: ['_id'],\n matches: {},\n countMatches: [],\n objectIdSize: 0\n });\n }\n};\n\nexport default DatabaseFileStorage;"],"names":[],"mappings":";;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASM,MAAO,mBAAoB,SAAQ,eAAe,CAAA;AAEtD,IAAA,WAAA,CAAY,UAA8B,EAAE,EAAA;QAC1C,KAAK,CAAC,OAAO,CAAC;;AAGhB,IAAA,IAAI,MAAM,GAAA;QACR,OAAO;AACL,YAAA,YAAY,EAAE;AACZ,gBAAA,MAAM,EAAE;AACN,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,KAAK,EAAE,QAAQ;AACf,oBAAA,GAAG,EAAE,QAAQ;AACb,oBAAA,IAAI,EAAE,QAAQ;AACd,oBAAA,MAAM,EAAE,QAAQ;AACjB,iBAAA;AACD,gBAAA,qBAAqB,EAAE;AACrB,oBAAA,GAAG,EAAE,EAAE;AACP,oBAAA,IAAI,EAAE,EAAE;AACR,oBAAA,KAAK,EAAE,EAAE;AACT,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACV,oBAAA,MAAM,EAAE,EAAE;AACX,iBAAA;AACD,gBAAA,qBAAqB,EAAE;oBACrB,WAAW,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE;AACxC,iBAAA;AACD,gBAAA,OAAO,EAAE;AACP,oBAAA,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;oBACxC,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE;AAC/B;AACF,aAAA;SACF;;IAGH,MAAM,WAAW,CAAI,KAAsB,EAAE,KAAa,EAAE,KAAa,EAAE,GAAW,EAAE,UAAkB,EAAA;QAExG,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC;YACrD,KAAK;AACL,YAAA,KAAK,EAAE,KAAK;AACZ,YAAA,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,GAAG,GAAG,KAAK;AACjB,YAAA,MAAM,EAAE,cAAc,CAAC,UAAU,CAAC;AACnC,SAAA,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAEpB,QAAA,IAAI,CAAC,OAAO;AAAE,YAAA,MAAM,KAAK,CAAC,qBAAqB,CAAC;;IAIlD,OAAO,UAAU,CAAI,KAAsB,EAAE,KAAa,EAAE,KAA0B,EAAE,GAAwB,EAAA;AAC9G,QAAA,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,YAAY;AACnC,aAAA,IAAI,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;AACjB,aAAA,MAAM,CAAC;AACN,YAAA,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE;AACrB,YAAA,GAAG,KAAK,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE;AACvC,YAAA,GAAG,GAAG,GAAG,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE;AACtC,SAAA,CAAC;AACJ,QAAA,WAAW,MAAM,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE;YACtD,MAAM,UAAU,GAAG,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;YACrC,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC;AAAE,gBAAA,MAAM,KAAK,CAAC,gBAAgB,CAAC;YACjF,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU;AACjB,gBAAA,IAAI,EAAE,cAAc,CAAC,MAAM,CAAC;aAC7B;;;AAIL,IAAA,MAAM,OAAO,CAAI,KAAsB,EAAE,KAAa,EAAA;AACpD,QAAA,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC;AACvB,YAAA,SAAS,EAAE,YAAY;AACvB,YAAA,MAAM,EAAE,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC;YACvD,QAAQ,EAAE,CAAC,KAAK,CAAC;AACjB,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,YAAY,EAAE,EAAE;AAChB,YAAA,YAAY,EAAE;AACf,SAAA,CAAC;;AAEL;;;;"}

@@ -1,5 +1,5 @@
-import { P as ProtoService } from '../../internals/index-
-import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-
+import { P as ProtoService } from '../../internals/index-BZcW5wgM.js';
+import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-B0DE6MVr.js';
 import '@o2ter/utils-js';
 import 'jsonwebtoken';
 import '@o2ter/server-js';

@@ -1,7 +1,7 @@
 import * as _google_cloud_storage from '@google-cloud/storage';
 import { Storage } from '@google-cloud/storage';
-import { P as ProtoService } from '../../internals/index-
-import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-
+import { P as ProtoService } from '../../internals/index-BZcW5wgM.js';
+import { F as FileStorageBase, a as FileStorageOptions } from '../../internals/index-B0DE6MVr.js';
 import '@o2ter/utils-js';
 import 'jsonwebtoken';
 import '@o2ter/server-js';

@@ -1,5 +1,5 @@
 import { Pool, PoolClient, PoolConfig } from 'pg';
-import { _ as _TValue, o as EventData, T as TSchema, Q as QuerySelector, p as DecodedQuery, F as FindOptions, R as RelationOptions, q as DecodedSortOption, I as InsertOptions, r as TValue, t as FindOneOptions, u as TUpdateOp, v as FieldSelectorExpression, w as QueryExpression, x as TStorage, y as TransactionOptions, h as TObject, z as TQueryRandomOptions, A as TPubSub } from '../../internals/index-
+import { _ as _TValue, o as EventData, T as TSchema, Q as QuerySelector, p as DecodedQuery, F as FindOptions, R as RelationOptions, q as DecodedSortOption, I as InsertOptions, r as TValue, t as FindOneOptions, u as TUpdateOp, v as FieldSelectorExpression, w as QueryExpression, x as TStorage, y as TransactionOptions, h as TObject, z as TQueryRandomOptions, A as TPubSub } from '../../internals/index-BZcW5wgM.js';
 import * as _o2ter_utils_js from '@o2ter/utils-js';
 import { asyncStream } from '@o2ter/utils-js';
 import 'jsonwebtoken';

@@ -57,6 +57,7 @@ type Populate = {
 filter?: QuerySelector;
 includes: Record<string, TSchema.DataType>;
 populates: Record<string, Populate>;
+countMatches: string[];
 sort?: Record<string, 1 | -1> | DecodedSortOption[];
 skip?: number;
 limit?: number;

@@ -124,7 +125,7 @@ interface SqlDialect {
 selectPopulate(compiler: QueryCompiler, parent: {
 className: string;
 name: string;
-}, populate: Populate, field: string): {
+}, populate: Populate, field: string, countMatches: boolean): {
 columns: SQL[];
 join?: SQL;
 };

@@ -11,7 +11,7 @@ var Decimal = require('decimal.js');
 var utils = require('pg/lib/utils');
 var index$1 = require('../../internals/index-Dc3V_Bzw.js');
 require('@o2ter/crypto-js');
-var random$1 = require('../../internals/random-
+var random$1 = require('../../internals/random-CvVZek_8.js');
 var _private = require('../../internals/private-CSB1Ep4g.js');
 
 //

@@ -237,22 +237,25 @@ class QueryCompiler {
 return this.idx++;
 }
 _makeContext(query) {
-const context = this._encodeIncludes(query
+const context = this._encodeIncludes(query);
 return {
 ...context,
 sorting: _encodeSorting(context.includes, context.populates, query.sort),
 };
 }
-_encodeIncludes(
+_encodeIncludes(query) {
 const names = {};
 const populates = {};
-
-
+const countMatches = [];
+for (const include of query.includes) {
+const { paths: [colname, ...subpath], dataType } = random$1.resolveColumn(this.schema, query.className, include);
 names[colname] = dataType;
+if (index.isRelation(dataType) && _.includes(query.countMatches, colname))
+countMatches.push(colname);
 if (index.isPointer(dataType) || index.isRelation(dataType)) {
 if (_.isEmpty(subpath))
 throw Error(`Invalid path: ${include}`);
-const _matches = matches[colname];
+const _matches = query.matches[colname];
 populates[colname] = populates[colname] ?? {
 name: `t${this.nextIdx()}`,
 className: dataType.target,

@@ -278,13 +281,22 @@ class QueryCompiler {
 }
 }
 for (const [colname, populate] of _.toPairs(populates)) {
-const _matches = matches[colname];
-const { includes, populates } = this._encodeIncludes(
+const _matches = query.matches[colname];
+const { includes, populates, countMatches } = this._encodeIncludes({
+className: populate.className,
+includes: populate.subpaths,
+matches: _matches.matches,
+countMatches: [
+..._.filter(query.countMatches, x => _.startsWith(x, `${colname}.`)).map(x => x.slice(colname.length + 1)),
+..._matches.countMatches ?? [],
+],
+});
 populate.sort = _encodeSorting(includes, populates, _matches.sort);
 populate.includes = includes;
 populate.populates = populates;
+populate.countMatches = countMatches;
 }
-return { includes: names, populates };
+return { includes: names, populates, countMatches };
 }
 _baseSelectQuery(query, options) {
 const context = this._makeContext(query);
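The recursive branch above forwards only the countMatches entries that belong to each populated relation, dropping the parent column prefix before recursing (entries coming from the nested matches are merged in as well). A minimal standalone sketch of just that prefix handling — splitCountMatches and the field names are illustrative, not part of the package:

// Forward 'comments.likes' to the 'comments' populate as 'likes'; drop unrelated entries.
const splitCountMatches = (countMatches: string[], colname: string): string[] =>
  countMatches
    .filter(x => x.startsWith(`${colname}.`))
    .map(x => x.slice(colname.length + 1));

// splitCountMatches(['comments', 'comments.likes'], 'comments') → ['likes']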

@@ -328,7 +340,7 @@ class QueryCompiler {
 };
 }
 _refetch(name, query) {
-const _context = this._encodeIncludes(query
+const _context = this._encodeIncludes(query);
 const populates = _.mapValues(_context.populates, (populate) => this.dialect.encodePopulate(this, populate, { className: query.className, name }));
 const stages = _.fromPairs(_.flatMap(_.values(populates), (p) => _.toPairs(p)));
 const _populates = this._selectPopulateMap(_context, query.className, name);

@@ -362,7 +374,10 @@ class QueryCompiler {
 `;
 }
 _encodeUpdateAttrs(className, attrs) {
-const updates = [
+const updates = [
+sql `__v = __v + 1`,
+sql `_updated_at = NOW()`,
+];
 for (const [path, op] of _.toPairs(attrs)) {
 const { paths: [column, ...subpath], dataType } = random$1.resolveColumn(this.schema, className, path);
 if (index.isShape(dataType)) {

@@ -447,7 +462,7 @@ class QueryCompiler {
 return _.map(context.populates, (populate, field) => this.dialect.selectPopulate(this, {
 className,
 name,
-}, populate, field));
+}, populate, field, _.includes(context.countMatches, field)));
 }
 insert(options, attrs) {
 const _attrs = _.toPairs({

@@ -496,8 +511,7 @@ class QueryCompiler {
 return sql `
 , ${{ identifier: name }} AS (
 UPDATE ${{ identifier: query.className }}
-SET
-${!_.isEmpty(update) ? sql `, ${this._encodeUpdateAttrs(query.className, update)}` : sql ``}
+SET ${this._encodeUpdateAttrs(query.className, update)}
 WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})
 RETURNING *
 )

@@ -509,8 +523,7 @@ class QueryCompiler {
 return this._modifyQuery(query, (fetchName) => {
 return sql `
 UPDATE ${{ identifier: query.className }}
-SET
-${!_.isEmpty(update) ? sql `, ${this._encodeUpdateAttrs(query.className, update)}` : sql ``}
+SET ${this._encodeUpdateAttrs(query.className, update)}
 WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})
 RETURNING _id
 `;

@@ -528,8 +541,7 @@ class QueryCompiler {
 return sql `
 , ${{ identifier: updateName }} AS (
 UPDATE ${{ identifier: query.className }}
-SET
-${!_.isEmpty(update) ? sql `, ${this._encodeUpdateAttrs(query.className, update)}` : sql ``}
+SET ${this._encodeUpdateAttrs(query.className, update)}
 WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})
 RETURNING *
 )

@@ -560,17 +572,16 @@ class QueryCompiler {
 return sql `
 , ${{ identifier: updateName }} AS (
 UPDATE ${{ identifier: query.className }}
-SET
-${!_.isEmpty(update) ? sql `, ${this._encodeUpdateAttrs(query.className, update)}` : sql ``}
+SET ${this._encodeUpdateAttrs(query.className, update)}
 WHERE ${{ identifier: query.className }}._id IN (SELECT ${{ identifier: fetchName }}._id FROM ${{ identifier: fetchName }})
-RETURNING _id,
+RETURNING _id, __v
 )
 , ${{ identifier: insertName }} AS (
 INSERT INTO ${{ identifier: query.className }}
 (${_.map(_insert, x => sql `${{ identifier: x[0] }}`)})
 SELECT ${_.map(_insert, x => sql `${x[1]} AS ${{ identifier: x[0] }}`)}
 WHERE NOT EXISTS(SELECT * FROM ${{ identifier: updateName }})
-RETURNING _id,
+RETURNING _id, __v
 )
 SELECT * FROM ${{ identifier: updateName }}
 UNION

@@ -664,7 +675,9 @@ class SqlStorage {
 }
 else if (index.isRelation(type)) {
 const _value = _.get(value, path);
-if (_.
+if (_.isString(_value) && _value.match(/^\d+$/g))
+_.set(result, path, parseInt(_value));
+else if (_.isArray(_value))
 _.set(result, path, _value.map(x => this._decodeObject(type.target, x)));
 }
 else {

@@ -700,7 +713,9 @@ class SqlStorage {
 }
 }
 else if (index.isRelation(dataType)) {
-if (_.
+if (_.isString(value) && value.match(/^\d+$/g))
+obj[_private.PVK].attributes[key] = parseInt(value);
+else if (_.isArray(value))
 obj[_private.PVK].attributes[key] = value.map(x => this._decodeObject(dataType.target, x));
 }
 else {
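Both decode paths above now accept two shapes for a relation column: a digit-only string (a COUNT(*) result, which the pg driver returns as a string) or the usual array of related records. A standalone sketch of that branching — decodeRelation and decodeObject are illustrative names, not part of the package, and the fallback is simplified:

// A relation value may decode to either a bare match count or the populated objects.
const decodeRelation = <T>(value: unknown, decodeObject: (raw: unknown) => T): number | T[] => {
  if (typeof value === 'string' && /^\d+$/.test(value)) return parseInt(value, 10);
  if (Array.isArray(value)) return value.map(x => decodeObject(x));
  return [];  // simplified fallback for this sketch only
};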

@@ -811,9 +826,10 @@ class SqlStorage {
 async upsertMany(query, update, setOnInsert) {
 const compiler = this._makeCompiler(true, query.extraFilter);
 const upserted = await this.query(compiler.upsertMany(query, update, setOnInsert));
+const inserted = _.filter(upserted, x => x.__v === 0).length;
 return {
-updated:
-inserted:
+updated: upserted.length - inserted,
+inserted: inserted,
 };
 }
 async deleteOne(query) {
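Because both the UPDATE and INSERT branches of the upsert CTE now return __v, and _encodeUpdateAttrs always bumps __v on update, the adapter can split the returned rows into inserted and updated counts: rows still carrying __v === 0 can only have come from the INSERT branch. A minimal sketch of that accounting, as a hedged reading of the compiled code above:

// Newly inserted rows start at version 0; updated rows have had __v bumped by the SET clause.
const countUpsertResult = (upserted: { __v: number }[]) => {
  const inserted = upserted.filter(x => x.__v === 0).length;
  return { inserted, updated: upserted.length - inserted };
};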

@@ -1992,11 +2008,17 @@ const _selectRelationPopulate = (compiler, parent, populate, field, encode) => {
 ${compiler.selectLock ? compiler.isUpdate ? sql `FOR UPDATE NOWAIT` : sql `FOR SHARE NOWAIT` : sql ``}
 `;
 };
-const selectPopulate = (compiler, parent, populate, field) => {
+const selectPopulate = (compiler, parent, populate, field, countMatches) => {
 if (populate.type === 'relation') {
 return {
 columns: [
-sql `
+countMatches ? sql `
+(
+SELECT COUNT(*) FROM (
+${_selectRelationPopulate(compiler, parent, populate, field, false)}
+) ${{ identifier: populate.name }}
+) AS ${{ identifier: field }}
+` : sql `
 ARRAY(
 SELECT to_jsonb(${{ identifier: populate.name }}) FROM (
 ${_selectRelationPopulate(compiler, parent, populate, field, true)}

@@ -2097,7 +2119,7 @@ const encodePopulate = (compiler, parent, remix) => {
 parent.filter && compiler._encodeFilter(parent, parent.filter),
 compiler.extraFilter && compiler._encodeFilter(parent, compiler.extraFilter(parent.className)),
 ]);
-const _populates = _.map(parent.populates, (populate, field) => selectPopulate(compiler, parent, populate, field));
+const _populates = _.map(parent.populates, (populate, field) => selectPopulate(compiler, parent, populate, field, _.includes(parent.countMatches, field)));
 const _joins = _.compact(_.map(_populates, ({ join }) => join));
 const _includes = _.pickBy(parent.includes, v => index.isPrimitive(v));
 const { joins: _joins2 = [], field: _foreignField = undefined, rows = false, } = parent.foreignField ? encodeForeignField(compiler, {