@milaboratories/pl-model-common 1.21.3 → 1.21.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/drivers/ChunkedStreamReader.cjs +206 -0
- package/dist/drivers/ChunkedStreamReader.cjs.map +1 -0
- package/dist/drivers/ChunkedStreamReader.d.ts +117 -0
- package/dist/drivers/ChunkedStreamReader.d.ts.map +1 -0
- package/dist/drivers/ChunkedStreamReader.js +204 -0
- package/dist/drivers/ChunkedStreamReader.js.map +1 -0
- package/dist/drivers/index.d.ts +1 -0
- package/dist/drivers/index.d.ts.map +1 -1
- package/dist/drivers/pframe/pframe.d.ts +2 -2
- package/dist/drivers/pframe/pframe.d.ts.map +1 -1
- package/dist/drivers/pframe/spec/spec.cjs.map +1 -1
- package/dist/drivers/pframe/spec/spec.d.ts +6 -6
- package/dist/drivers/pframe/spec/spec.d.ts.map +1 -1
- package/dist/drivers/pframe/spec/spec.js.map +1 -1
- package/dist/index.cjs +3 -0
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +2 -1
- package/dist/index.js.map +1 -1
- package/dist/json.cjs +4 -0
- package/dist/json.cjs.map +1 -1
- package/dist/json.d.ts +11 -3
- package/dist/json.d.ts.map +1 -1
- package/dist/json.js +4 -1
- package/dist/json.js.map +1 -1
- package/dist/pool/spec.cjs.map +1 -1
- package/dist/pool/spec.d.ts +2 -2
- package/dist/pool/spec.d.ts.map +1 -1
- package/dist/pool/spec.js.map +1 -1
- package/package.json +6 -6
- package/src/drivers/ChunkedStreamReader.ts +270 -0
- package/src/drivers/index.ts +1 -0
- package/src/drivers/pframe/pframe.ts +2 -2
- package/src/drivers/pframe/spec/spec.ts +6 -6
- package/src/json.ts +28 -14
- package/src/pool/spec.ts +2 -2
package/dist/pool/spec.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"spec.cjs","sources":["../../src/pool/spec.ts"],"sourcesContent":["import type { Branded } from '../branding';\nimport type { JoinEntry, PColumn, PColumnSpec } from '../drivers';\nimport { assertNever } from '../util';\nimport type { ResultPoolEntry } from './entry';\n\n/** Any object exported into the result pool by the block always have spec attached to it */\nexport
+
{"version":3,"file":"spec.cjs","sources":["../../src/pool/spec.ts"],"sourcesContent":["import type { Branded } from '../branding';\nimport type { JoinEntry, PColumn, PColumnSpec } from '../drivers';\nimport { assertNever } from '../util';\nimport type { ResultPoolEntry } from './entry';\n\n/** Any object exported into the result pool by the block always have spec attached to it */\nexport type PObjectSpec = {\n /** PObject kind discriminator */\n readonly kind: string;\n\n /** Name is common part of PObject identity */\n readonly name: string;\n\n /** Domain is a set of key-value pairs that can be used to identify the object */\n readonly domain?: Record<string, string>;\n\n /** Additional information attached to the object */\n readonly annotations?: Record<string, string>;\n};\n\n/** Stable PObject id */\nexport type PObjectId = Branded<string, 'PColumnId'>;\n\n/**\n * Full PObject representation.\n *\n * @template Data type of the object referencing or describing the \"data\" part of the PObject\n * */\nexport interface PObject<Data> {\n /** Fully rendered PObjects are assigned a stable identifier. */\n readonly id: PObjectId;\n\n /** PObject spec, allowing it to be found among other PObjects */\n readonly spec: PObjectSpec;\n\n /** A handle to data object */\n readonly data: Data;\n}\n\nexport function isPColumnSpec(spec: PObjectSpec): spec is PColumnSpec {\n return spec.kind === 'PColumn';\n}\n\nexport function isPColumn<T>(obj: PObject<T>): obj is PColumn<T> {\n return isPColumnSpec(obj.spec);\n}\n\nexport function isPColumnSpecResult(\n r: ResultPoolEntry<PObjectSpec>,\n): r is ResultPoolEntry<PColumnSpec> {\n return isPColumnSpec(r.obj);\n}\n\nexport function isPColumnResult<T>(\n r: ResultPoolEntry<PObject<T>>,\n): r is ResultPoolEntry<PColumn<T>> {\n return isPColumnSpec(r.obj.spec);\n}\n\nexport function ensurePColumn<T>(obj: PObject<T>): PColumn<T> {\n if (!isPColumn(obj)) throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n}\n\nexport function mapPObjectData<D1, D2>(pObj: PColumn<D1>, cb: (d: D1) => D2): PColumn<D2>;\nexport function mapPObjectData<D1, D2>(\n pObj: PColumn<D1> | undefined,\n cb: (d: D1) => D2\n): PColumn<D2> | undefined;\nexport function mapPObjectData<D1, D2>(pObj: PObject<D1>, cb: (d: D1) => D2): PObject<D2>;\nexport function mapPObjectData<D1, D2>(\n pObj: PObject<D1> | undefined,\n cb: (d: D1) => D2\n): PObject<D2> | undefined;\nexport function mapPObjectData<D1, D2>(\n pObj: PObject<D1> | undefined,\n cb: (d: D1) => D2,\n): PObject<D2> | undefined {\n return pObj === undefined\n ? 
undefined\n : {\n ...pObj,\n data: cb(pObj.data),\n };\n}\n\nexport function extractAllColumns<D>(entry: JoinEntry<PColumn<D>>): PColumn<D>[] {\n const columns = new Map<PObjectId, PColumn<D>>();\n const addAllColumns = (entry: JoinEntry<PColumn<D>>) => {\n switch (entry.type) {\n case 'column':\n columns.set(entry.column.id, entry.column);\n return;\n case 'slicedColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'artificialColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'inlineColumn':\n return;\n case 'full':\n case 'inner':\n for (const e of entry.entries) addAllColumns(e);\n return;\n case 'outer':\n addAllColumns(entry.primary);\n for (const e of entry.secondary) addAllColumns(e);\n return;\n default:\n assertNever(entry);\n }\n };\n addAllColumns(entry);\n return [...columns.values()];\n}\n"],"names":["assertNever"],"mappings":";;;;AAuCM,SAAU,aAAa,CAAC,IAAiB,EAAA;AAC7C,IAAA,OAAO,IAAI,CAAC,IAAI,KAAK,SAAS;AAChC;AAEM,SAAU,SAAS,CAAI,GAAe,EAAA;AAC1C,IAAA,OAAO,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC;AAChC;AAEM,SAAU,mBAAmB,CACjC,CAA+B,EAAA;AAE/B,IAAA,OAAO,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC;AAC7B;AAEM,SAAU,eAAe,CAC7B,CAA8B,EAAA;IAE9B,OAAO,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC;AAClC;AAEM,SAAU,aAAa,CAAI,GAAe,EAAA;AAC9C,IAAA,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC;QAAE,MAAM,IAAI,KAAK,CAAC,CAAA,sBAAA,EAAyB,GAAG,CAAC,IAAI,CAAC,IAAI,CAAA,CAAA,CAAG,CAAC;AAC/E,IAAA,OAAO,GAAG;AACZ;AAYM,SAAU,cAAc,CAC5B,IAA6B,EAC7B,EAAiB,EAAA;IAEjB,OAAO,IAAI,KAAK;AACd,UAAE;AACF,UAAE;AACE,YAAA,GAAG,IAAI;AACP,YAAA,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC;SACpB;AACP;AAEM,SAAU,iBAAiB,CAAI,KAA4B,EAAA;AAC/D,IAAA,MAAM,OAAO,GAAG,IAAI,GAAG,EAAyB;AAChD,IAAA,MAAM,aAAa,GAAG,CAAC,KAA4B,KAAI;AACrD,QAAA,QAAQ,KAAK,CAAC,IAAI;AAChB,YAAA,KAAK,QAAQ;AACX,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,cAAc;AACjB,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,kBAAkB;AACrB,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,cAAc;gBACjB;AACF,YAAA,KAAK,MAAM;AACX,YAAA,KAAK,OAAO;AACV,gBAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,OAAO;oBAAE,aAAa,CAAC,CAAC,CAAC;gBAC/C;AACF,YAAA,KAAK,OAAO;AACV,gBAAA,aAAa,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,gBAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,SAAS;oBAAE,aAAa,CAAC,CAAC,CAAC;gBACjD;AACF,YAAA;gBACEA,gBAAW,CAAC,KAAK,CAAC;;AAExB,IAAA,CAAC;IACD,aAAa,CAAC,KAAK,CAAC;AACpB,IAAA,OAAO,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;AAC9B;;;;;;;;;;"}
package/dist/pool/spec.d.ts
CHANGED
@@ -2,7 +2,7 @@ import type { Branded } from '../branding';
 import type { JoinEntry, PColumn, PColumnSpec } from '../drivers';
 import type { ResultPoolEntry } from './entry';
 /** Any object exported into the result pool by the block always have spec attached to it */
-export interface PObjectSpec {
+export type PObjectSpec = {
     /** PObject kind discriminator */
     readonly kind: string;
     /** Name is common part of PObject identity */
@@ -11,7 +11,7 @@ export interface PObjectSpec {
    readonly domain?: Record<string, string>;
    /** Additional information attached to the object */
    readonly annotations?: Record<string, string>;
-}
+};
 /** Stable PObject id */
 export type PObjectId = Branded<string, 'PColumnId'>;
 /**
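The hunks above turn `PObjectSpec` from an interface into an equivalent type alias; the column specs later in this diff now compose with it by intersection instead of `extends`. A minimal sketch of that pattern, assuming `PObjectSpec` is re-exported from the package root; the `MyBlobSpec` name and its extra field are hypothetical:

```typescript
import type { PObjectSpec } from '@milaboratories/pl-model-common';

// Intersection-based extension, mirroring how PUniversalColumnSpec is now declared below.
// 'MyBlobSpec' is a hypothetical consumer-side spec, not part of the package.
type MyBlobSpec = PObjectSpec & {
  readonly kind: 'MyBlob';
  readonly mimeType: string;
};

const spec: MyBlobSpec = {
  kind: 'MyBlob',
  name: 'example-blob',
  mimeType: 'application/octet-stream',
};
```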
package/dist/pool/spec.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"spec.d.ts","sourceRoot":"","sources":["../../src/pool/spec.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAElE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,4FAA4F;AAC5F,MAAM,WAAW,
+
{"version":3,"file":"spec.d.ts","sourceRoot":"","sources":["../../src/pool/spec.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAElE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,4FAA4F;AAC5F,MAAM,MAAM,WAAW,GAAG;IACxB,iCAAiC;IACjC,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IAEtB,8CAA8C;IAC9C,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IAEtB,iFAAiF;IACjF,QAAQ,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEzC,oDAAoD;IACpD,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAC/C,CAAC;AAEF,wBAAwB;AACxB,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;AAErD;;;;KAIK;AACL,MAAM,WAAW,OAAO,CAAC,IAAI;IAC3B,gEAAgE;IAChE,QAAQ,CAAC,EAAE,EAAE,SAAS,CAAC;IAEvB,iEAAiE;IACjE,QAAQ,CAAC,IAAI,EAAE,WAAW,CAAC;IAE3B,8BAA8B;IAC9B,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;CACrB;AAED,wBAAgB,aAAa,CAAC,IAAI,EAAE,WAAW,GAAG,IAAI,IAAI,WAAW,CAEpE;AAED,wBAAgB,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,CAAC,CAAC,CAE/D;AAED,wBAAgB,mBAAmB,CACjC,CAAC,EAAE,eAAe,CAAC,WAAW,CAAC,GAC9B,CAAC,IAAI,eAAe,CAAC,WAAW,CAAC,CAEnC;AAED,wBAAgB,eAAe,CAAC,CAAC,EAC/B,CAAC,EAAE,eAAe,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAC7B,CAAC,IAAI,eAAe,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAElC;AAED,wBAAgB,aAAa,CAAC,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAG5D;AAED,wBAAgB,cAAc,CAAC,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAAC;AAC1F,wBAAgB,cAAc,CAAC,EAAE,EAAE,EAAE,EACnC,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,SAAS,EAC7B,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAChB,OAAO,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC;AAC3B,wBAAgB,cAAc,CAAC,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAAC;AAC1F,wBAAgB,cAAc,CAAC,EAAE,EAAE,EAAE,EACnC,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,SAAS,EAC7B,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAChB,OAAO,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC;AAa3B,wBAAgB,iBAAiB,CAAC,CAAC,EAAE,KAAK,EAAE,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,EAAE,CA6B/E"}
package/dist/pool/spec.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"spec.js","sources":["../../src/pool/spec.ts"],"sourcesContent":["import type { Branded } from '../branding';\nimport type { JoinEntry, PColumn, PColumnSpec } from '../drivers';\nimport { assertNever } from '../util';\nimport type { ResultPoolEntry } from './entry';\n\n/** Any object exported into the result pool by the block always have spec attached to it */\nexport
+
{"version":3,"file":"spec.js","sources":["../../src/pool/spec.ts"],"sourcesContent":["import type { Branded } from '../branding';\nimport type { JoinEntry, PColumn, PColumnSpec } from '../drivers';\nimport { assertNever } from '../util';\nimport type { ResultPoolEntry } from './entry';\n\n/** Any object exported into the result pool by the block always have spec attached to it */\nexport type PObjectSpec = {\n /** PObject kind discriminator */\n readonly kind: string;\n\n /** Name is common part of PObject identity */\n readonly name: string;\n\n /** Domain is a set of key-value pairs that can be used to identify the object */\n readonly domain?: Record<string, string>;\n\n /** Additional information attached to the object */\n readonly annotations?: Record<string, string>;\n};\n\n/** Stable PObject id */\nexport type PObjectId = Branded<string, 'PColumnId'>;\n\n/**\n * Full PObject representation.\n *\n * @template Data type of the object referencing or describing the \"data\" part of the PObject\n * */\nexport interface PObject<Data> {\n /** Fully rendered PObjects are assigned a stable identifier. */\n readonly id: PObjectId;\n\n /** PObject spec, allowing it to be found among other PObjects */\n readonly spec: PObjectSpec;\n\n /** A handle to data object */\n readonly data: Data;\n}\n\nexport function isPColumnSpec(spec: PObjectSpec): spec is PColumnSpec {\n return spec.kind === 'PColumn';\n}\n\nexport function isPColumn<T>(obj: PObject<T>): obj is PColumn<T> {\n return isPColumnSpec(obj.spec);\n}\n\nexport function isPColumnSpecResult(\n r: ResultPoolEntry<PObjectSpec>,\n): r is ResultPoolEntry<PColumnSpec> {\n return isPColumnSpec(r.obj);\n}\n\nexport function isPColumnResult<T>(\n r: ResultPoolEntry<PObject<T>>,\n): r is ResultPoolEntry<PColumn<T>> {\n return isPColumnSpec(r.obj.spec);\n}\n\nexport function ensurePColumn<T>(obj: PObject<T>): PColumn<T> {\n if (!isPColumn(obj)) throw new Error(`not a PColumn (kind = ${obj.spec.kind})`);\n return obj;\n}\n\nexport function mapPObjectData<D1, D2>(pObj: PColumn<D1>, cb: (d: D1) => D2): PColumn<D2>;\nexport function mapPObjectData<D1, D2>(\n pObj: PColumn<D1> | undefined,\n cb: (d: D1) => D2\n): PColumn<D2> | undefined;\nexport function mapPObjectData<D1, D2>(pObj: PObject<D1>, cb: (d: D1) => D2): PObject<D2>;\nexport function mapPObjectData<D1, D2>(\n pObj: PObject<D1> | undefined,\n cb: (d: D1) => D2\n): PObject<D2> | undefined;\nexport function mapPObjectData<D1, D2>(\n pObj: PObject<D1> | undefined,\n cb: (d: D1) => D2,\n): PObject<D2> | undefined {\n return pObj === undefined\n ? 
undefined\n : {\n ...pObj,\n data: cb(pObj.data),\n };\n}\n\nexport function extractAllColumns<D>(entry: JoinEntry<PColumn<D>>): PColumn<D>[] {\n const columns = new Map<PObjectId, PColumn<D>>();\n const addAllColumns = (entry: JoinEntry<PColumn<D>>) => {\n switch (entry.type) {\n case 'column':\n columns.set(entry.column.id, entry.column);\n return;\n case 'slicedColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'artificialColumn':\n columns.set(entry.column.id, entry.column);\n return;\n case 'inlineColumn':\n return;\n case 'full':\n case 'inner':\n for (const e of entry.entries) addAllColumns(e);\n return;\n case 'outer':\n addAllColumns(entry.primary);\n for (const e of entry.secondary) addAllColumns(e);\n return;\n default:\n assertNever(entry);\n }\n };\n addAllColumns(entry);\n return [...columns.values()];\n}\n"],"names":[],"mappings":";;AAuCM,SAAU,aAAa,CAAC,IAAiB,EAAA;AAC7C,IAAA,OAAO,IAAI,CAAC,IAAI,KAAK,SAAS;AAChC;AAEM,SAAU,SAAS,CAAI,GAAe,EAAA;AAC1C,IAAA,OAAO,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC;AAChC;AAEM,SAAU,mBAAmB,CACjC,CAA+B,EAAA;AAE/B,IAAA,OAAO,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC;AAC7B;AAEM,SAAU,eAAe,CAC7B,CAA8B,EAAA;IAE9B,OAAO,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC;AAClC;AAEM,SAAU,aAAa,CAAI,GAAe,EAAA;AAC9C,IAAA,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC;QAAE,MAAM,IAAI,KAAK,CAAC,CAAA,sBAAA,EAAyB,GAAG,CAAC,IAAI,CAAC,IAAI,CAAA,CAAA,CAAG,CAAC;AAC/E,IAAA,OAAO,GAAG;AACZ;AAYM,SAAU,cAAc,CAC5B,IAA6B,EAC7B,EAAiB,EAAA;IAEjB,OAAO,IAAI,KAAK;AACd,UAAE;AACF,UAAE;AACE,YAAA,GAAG,IAAI;AACP,YAAA,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC;SACpB;AACP;AAEM,SAAU,iBAAiB,CAAI,KAA4B,EAAA;AAC/D,IAAA,MAAM,OAAO,GAAG,IAAI,GAAG,EAAyB;AAChD,IAAA,MAAM,aAAa,GAAG,CAAC,KAA4B,KAAI;AACrD,QAAA,QAAQ,KAAK,CAAC,IAAI;AAChB,YAAA,KAAK,QAAQ;AACX,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,cAAc;AACjB,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,kBAAkB;AACrB,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,MAAM,CAAC;gBAC1C;AACF,YAAA,KAAK,cAAc;gBACjB;AACF,YAAA,KAAK,MAAM;AACX,YAAA,KAAK,OAAO;AACV,gBAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,OAAO;oBAAE,aAAa,CAAC,CAAC,CAAC;gBAC/C;AACF,YAAA,KAAK,OAAO;AACV,gBAAA,aAAa,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,gBAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,SAAS;oBAAE,aAAa,CAAC,CAAC,CAAC;gBACjD;AACF,YAAA;gBACE,WAAW,CAAC,KAAK,CAAC;;AAExB,IAAA,CAAC;IACD,aAAa,CAAC,KAAK,CAAC;AACpB,IAAA,OAAO,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;AAC9B;;;;"}
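The `sourcesContent` embedded in the maps above carries the full `pool/spec.ts` source, including the `mapPObjectData` helper. A usage sketch, assuming the helper and types are re-exported from the package root; the column literal (its id, spec fields, and the `'String'` value type) is illustrative only:

```typescript
import { mapPObjectData } from '@milaboratories/pl-model-common';
import type { PColumn, PObjectId } from '@milaboratories/pl-model-common';

// Hypothetical column whose data payload is a blob handle string.
const columnOfHandles: PColumn<string> = {
  id: 'col-1' as PObjectId,
  spec: {
    kind: 'PColumn',
    name: 'example/handles',
    valueType: 'String', // illustrative ValueType member
    axesSpec: [],
  },
  data: 'blob://example-handle',
};

// mapPObjectData keeps id and spec intact and transforms only the data payload.
const columnOfLengths: PColumn<number> = mapPObjectData(columnOfHandles, (h) => h.length);
console.log(columnOfLengths.data); // 21
```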
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@milaboratories/pl-model-common",
-  "version": "1.21.3",
+  "version": "1.21.5",
   "description": "Platforma SDK Model",
   "type": "module",
   "types": "./dist/index.d.ts",
@@ -24,18 +24,18 @@
   "devDependencies": {
     "eslint": "^9.25.1",
     "typescript": "~5.6.3",
-    "vitest": "^
-    "@milaboratories/ts-builder": "1.0.5",
-    "@milaboratories/build-configs": "1.0.8",
+    "vitest": "^4.0.7",
     "@platforma-sdk/eslint-config": "1.1.0",
-    "@milaboratories/ts-configs": "1.0.6"
+    "@milaboratories/ts-configs": "1.0.6",
+    "@milaboratories/build-configs": "1.0.8",
+    "@milaboratories/ts-builder": "1.0.5"
   },
   "scripts": {
     "type-check": "ts-builder types --target node",
     "build": "ts-builder build --target node",
     "watch": "ts-builder build --target node --watch",
     "lint": "eslint .",
-    "test": "vitest",
+    "test": "vitest run",
     "do-pack": "rm -f *.tgz && pnpm pack && mv *.tgz package.tgz"
   }
 }
package/src/drivers/ChunkedStreamReader.ts
ADDED
@@ -0,0 +1,270 @@
+import type { RangeBytes } from './blob';
+
+/**
+ * Status returned by onError handler to indicate what action to take
+ * - 'continue': Retry the failed operation
+ * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)
+ * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)
+ */
+export type ErrorHandlerStatus = 'continue' | 'error' | 'cancel';
+
+/**
+ * Options for creating a ChunkedStreamReader
+ */
+export interface ChunkedStreamReaderOptions {
+  /**
+   * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.
+   */
+  fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;
+
+  /**
+   * Total size of the blob in bytes
+   */
+  totalSize: number;
+
+  /**
+   * Size of each chunk to read in bytes (default: 16MB)
+   */
+  chunkSize?: number;
+
+  /**
+   * Error handler callback. Called when an error occurs during chunk fetching.
+   * Should return:
+   * - 'continue' to retry the operation
+   * - 'error' to error the stream (will call controller.error and abort ongoing fetches)
+   * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)
+   * Default behavior: returns 'error'.
+   */
+  onError?: (error: unknown) => Promise<ErrorHandlerStatus>;
+}
+
+/**
+ * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
+ * in fixed-size chunks. This is useful for streaming large files without loading
+ * them entirely into memory.
+ */
+export class ChunkedStreamReader {
+  private currentPosition: number = 0;
+  private _read = true;
+  private _canceled = false;
+  private _errored = false;
+  private abortController: AbortController | null = null;
+  private readonly options: Required<ChunkedStreamReaderOptions>;
+
+  /**
+   * Creates a new ChunkedStreamReader instance.
+   * Use the static `create` method instead.
+   */
+  private constructor(options: ChunkedStreamReaderOptions) {
+    // Normalize options with defaults
+    this.options = {
+      ...options,
+      chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
+      onError: options.onError ?? (async () => {
+        // Default behavior: error (will automatically call controller.error)
+        return 'error';
+      }),
+    };
+
+    if (this.totalSize < 0) {
+      throw new Error('Total size must be non-negative');
+    }
+    if (this.chunkSize <= 0) {
+      throw new Error('Chunk size must be positive');
+    }
+  }
+
+  /**
+   * Gets the fetchChunk function from options
+   */
+  private get fetchChunk() {
+    return this.options.fetchChunk;
+  }
+
+  /**
+   * Gets the total size from options
+   */
+  private get totalSize() {
+    return this.options.totalSize;
+  }
+
+  /**
+   * Gets the chunk size from options
+   */
+  private get chunkSize() {
+    return this.options.chunkSize;
+  }
+
+  /**
+   * Gets the onError callback from options
+   */
+  private get onError() {
+    return this.options.onError;
+  }
+
+  /**
+   * Creates and returns a ReadableStream that reads data in chunks.
+   *
+   * @param options - Configuration options for the chunked stream reader
+   * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+   *
+   * @example
+   * ```typescript
+   * const stream = ChunkedStreamReader.create({
+   *   fetchChunk: async (range, signal) => {
+   *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
+   *     return new Uint8Array(await response.arrayBuffer());
+   *   },
+   *   totalSize: 1024 * 1024, // 1MB
+   *   chunkSize: 64 * 1024, // 64KB chunks
+   * });
+   * ```
+   */
+  static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array> {
+    const reader = new ChunkedStreamReader(options);
+    return reader.createStream();
+  }
+
+  private readStart() {
+    this._read = true;
+  }
+
+  private readStop() {
+    this._read = false;
+  }
+
+  private async tryRead(controller: ReadableStreamDefaultController<Uint8Array>): Promise<boolean> {
+    if (this._canceled) {
+      return true;
+    }
+
+    // Check if we've read all data
+    if (this.isComplete()) {
+      controller.close();
+      return true;
+    }
+
+    try {
+      // Calculate the end position for this chunk
+      // Ensure we don't read beyond the total size
+      const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
+
+      // Fetch the chunk from the blob driver, passing the abort signal if available
+      const data = await this.fetchChunk(
+        { from: this.currentPosition, to: endPosition },
+        this.abortController?.signal,
+      );
+
+      // Check if stream was cancelled during the fetch
+      if (this._canceled) {
+        return true;
+      }
+
+      // Enqueue the data into the stream
+      controller.enqueue(data);
+
+      // Update the current position for the next chunk
+      this.currentPosition = endPosition;
+
+      if (!controller.desiredSize || controller.desiredSize <= 0) {
+        // The internal queue is full, so propagate
+        // the backpressure signal to the underlying source.
+        this.readStop();
+      }
+    } catch (error) {
+      // If any error occurs during chunk reading, call the error handler
+      const status = await this.onError(error);
+
+      if (status === 'error') {
+        this._errored = true;
+        // Error the stream and abort any ongoing fetch operations
+        controller.error(error);
+        this.abortController?.abort('Stream errored');
+        return true; // Stop reading
+      }
+
+      if (status === 'cancel') {
+        this._canceled = true;
+        // Close the stream gracefully and abort any ongoing fetch operations
+        controller.close();
+        this.abortController?.abort('Stream cancelled');
+        console.debug('ChunkedStreamReader cancelled due to error');
+        return true; // Stop reading
+      }
+    }
+
+    return false;
+  }
+
+  /**
+   * Creates and returns a ReadableStream that reads data in chunks.
+   * The stream will automatically close when all data has been read.
+   *
+   * @private - Use the static `create` method instead
+   * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+   */
+  private createStream(): ReadableStream<Uint8Array> {
+    // Create an AbortController for this stream
+    this.abortController = new AbortController();
+
+    return new ReadableStream({
+      start: async (controller) => {
+        while (true) {
+          if (this._canceled || this._errored) {
+            return;
+          }
+
+          if (!this._read) {
+            await new Promise((r) => setTimeout(r, 0));
+            if (controller.desiredSize) {
+              this.readStart();
+            }
+          } else {
+            const isDone = await this.tryRead(controller);
+            if (isDone) {
+              return;
+            }
+          }
+        }
+      },
+
+      pull: () => {
+        this.readStart();
+      },
+
+      cancel: (reason) => {
+        this._canceled = true;
+        // Abort any ongoing fetch operations
+        this.abortController?.abort(reason);
+        console.debug('ChunkedStreamReader cancelled:', reason);
+      },
+    });
+  }
+
+  /**
+   * Gets the current reading position in bytes.
+   *
+   * @returns Current position as number of bytes read
+   */
+  getCurrentPosition(): number {
+    return this.currentPosition;
+  }
+
+  /**
+   * Gets the remaining bytes to be read.
+   *
+   * @returns Number of bytes remaining
+   */
+  getRemainingBytes(): number {
+    return Math.max(0, this.totalSize - this.currentPosition);
+  }
+
+  /**
+   * Checks if the entire blob has been read.
+   *
+   * @returns True if all data has been read
+   */
+  isComplete(): boolean {
+    return this.currentPosition >= this.totalSize;
+  }
+}
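To complement the JSDoc example inside the class, here is a consumption sketch. The in-memory `blob` stands in for a real blob driver, the retry-tolerant `onError` handler is illustrative, and a root re-export of `ChunkedStreamReader` is assumed (the drivers index files gain one export line in this release). `RangeBytes` is treated as `{ from, to }` with an exclusive upper bound, matching how `tryRead` computes `endPosition`.

```typescript
import { ChunkedStreamReader } from '@milaboratories/pl-model-common';

// In-memory stand-in for a blob driver.
const blob = new Uint8Array(256 * 1024).fill(7);

let attempts = 0;
const stream = ChunkedStreamReader.create({
  fetchChunk: async (range) => {
    attempts++;
    if (attempts === 1) throw new Error('transient failure'); // simulate one flaky chunk
    return blob.subarray(range.from, range.to);
  },
  totalSize: blob.length,
  chunkSize: 64 * 1024,
  // Tolerate a couple of failed attempts overall, then error the stream.
  onError: async () => (attempts <= 2 ? 'continue' : 'error'),
});

// Drain the stream and count the delivered bytes.
async function drain(s: ReadableStream<Uint8Array>): Promise<number> {
  const reader = s.getReader();
  let received = 0;
  for (;;) {
    const { done, value } = await reader.read();
    if (done) return received;
    received += value.length;
  }
}

void drain(stream).then((n) => console.log(n === blob.length)); // true
```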
package/src/drivers/index.ts
CHANGED
package/src/drivers/pframe/pframe.ts
CHANGED
@@ -1,7 +1,7 @@
 import type { PObjectId } from '../../pool';
 import type { TableRange } from './data_types';
 import type { FindColumnsRequest, FindColumnsResponse } from './find_columns';
-import type {
+import type { PColumnIdAndSpec, PColumnSpec } from './spec/spec';
 import type {
   CalculateTableDataRequest,
   CalculateTableDataResponse,
@@ -33,4 +33,4 @@ export interface PFrame {
 }
 
 /** Information required to instantiate a PFrame. */
-export type PFrameDef<
+export type PFrameDef<Col> = Col[];
package/src/drivers/pframe/spec/spec.ts
CHANGED
@@ -270,7 +270,7 @@ export function readAnnotationJson<T extends keyof AnnotationJson>(
  * Each record inside a PColumn is addressed by a unique tuple of values set for
  * all the axes specified in the column spec.
  */
-export interface AxisSpec {
+export type AxisSpec = {
   /** Type of the axis value. Should not use non-key types like float or double. */
   readonly type: ValueType;
 
@@ -304,7 +304,7 @@ export interface AxisSpec {
    * in the list that defines the structure of the data model.
    */
   readonly parentAxes?: number[];
-}
+};
 
 /** Parents are specs, not indexes; normalized axis can be used considering its parents independently from column */
 export interface AxisSpecNormalized extends Omit<AxisSpec, 'parentAxes'> {
@@ -512,7 +512,7 @@ export const PColumnName = {
  *
  * Each element in tuple correspond to the axis having the same index in axesSpec.
  */
-export interface PUniversalColumnSpec extends PObjectSpec {
+export type PUniversalColumnSpec = PObjectSpec & {
   /** Defines specific type of BObject, the most generic type of unit of
    * information in Platforma Project. */
   readonly kind: 'PColumn';
@@ -536,7 +536,7 @@ export interface PUniversalColumnSpec extends PObjectSpec {
 
   /** Axes specifications */
   readonly axesSpec: AxesSpec;
-}
+};
 
 /**
  * Specification of a data column.
@@ -546,10 +546,10 @@ export interface PUniversalColumnSpec extends PObjectSpec {
  * values like files or other abstract data types. Data columns are optimized for storing and processing
 * basic tabular data.
  */
-export
+export type PDataColumnSpec = PUniversalColumnSpec & {
   /** Type of column values */
   readonly valueType: ValueType;
-}
+};
 
 // @todo: change this to PUniversalColumnSpec
 export type PColumnSpec = PDataColumnSpec;
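Since `PUniversalColumnSpec` and `PDataColumnSpec` are now intersections over `PObjectSpec` rather than an `extends` chain, assignability between them is unchanged. A small compile-time sketch (root re-exports assumed):

```typescript
import type { PColumnSpec, PObjectSpec, PUniversalColumnSpec } from '@milaboratories/pl-model-common';

// An intersection is assignable to each of its parts, so the old subtyping still holds.
declare const columnSpec: PColumnSpec;
const asUniversal: PUniversalColumnSpec = columnSpec; // ok
const asObjectSpec: PObjectSpec = columnSpec; // ok: kind, name, domain, annotations are all present
```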
package/src/json.ts
CHANGED
@@ -1,37 +1,51 @@
 import canonicalize from 'canonicalize';
 
-type JsonPrimitive = string | number | boolean | null
+type JsonPrimitive = string | number | boolean | null;
 
-type JsonValue = JsonPrimitive | JsonValue[] | {
-
-
+type JsonValue = JsonPrimitive | JsonValue[] | { [key: string]: JsonValue };
+
+export type JsonSerializable =
+  | JsonPrimitive
+  | JsonSerializable[]
+  | { [key: string]: JsonSerializable }
+  | { toJSON(): JsonValue };
 
 // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
 type NotAssignableToJson = bigint | symbol | Function;
 
-export type JsonCompatible<T> = unknown extends T ? unknown
-[
-
-
-
-
+export type JsonCompatible<T> = unknown extends T ? unknown
+  : [T] extends [JsonValue] ? T
+  : [T] extends [NotAssignableToJson] ? never
+  : {
+      [P in keyof T]: [Exclude<T[P], undefined>] extends [JsonValue] ? T[P]
+        : [Exclude<T[P], undefined>] extends [NotAssignableToJson] ? never
+        : JsonCompatible<T[P]>;
+    };
 
 export type StringifiedJson<T = unknown> = JsonCompatible<T> extends never ? never : string & {
   __json_stringified: T;
 };
 
-export function stringifyJson<T>(value: JsonCompatible<T>): StringifiedJson<T
-
+export function stringifyJson<T>(value: JsonCompatible<T>): StringifiedJson<T>;
+export function stringifyJson<T extends JsonSerializable>(value: T): string;
+export function stringifyJson(value: unknown): string {
+  return JSON.stringify(value);
 }
 
 export type CanonicalizedJson<T = unknown> = JsonCompatible<T> extends never ? never : string & {
   __json_canonicalized: T;
 };
 
-export function canonicalizeJson<T>(value: JsonCompatible<T>): CanonicalizedJson<T
-
+export function canonicalizeJson<T>(value: JsonCompatible<T>): CanonicalizedJson<T>;
+export function canonicalizeJson<T extends JsonSerializable>(value: T): string;
+export function canonicalizeJson(value: unknown): string {
+  return canonicalize(value)!;
 }
 
 export function parseJson<T>(value: StringifiedJson<T> | CanonicalizedJson<T>): T {
   return JSON.parse(value) as T;
 }
+
+export function bigintReplacer(_key: string, value: unknown): unknown {
+  return typeof value === 'bigint' ? value.toString() : value;
+}
package/src/pool/spec.ts
CHANGED
@@ -4,7 +4,7 @@ import { assertNever } from '../util';
 import type { ResultPoolEntry } from './entry';
 
 /** Any object exported into the result pool by the block always have spec attached to it */
-export interface PObjectSpec {
+export type PObjectSpec = {
   /** PObject kind discriminator */
   readonly kind: string;
 
@@ -16,7 +16,7 @@ export interface PObjectSpec {
 
   /** Additional information attached to the object */
   readonly annotations?: Record<string, string>;
-}
+};
 
 /** Stable PObject id */
 export type PObjectId = Branded<string, 'PColumnId'>;