@xyo-network/diviner-payload-memory 2.104.0 → 2.105.0-rc.1
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.cjs +3 -6
- package/dist/browser/index.cjs.map +1 -1
- package/dist/browser/index.js +3 -6
- package/dist/browser/index.js.map +1 -1
- package/dist/neutral/index.cjs +3 -6
- package/dist/neutral/index.cjs.map +1 -1
- package/dist/neutral/index.js +3 -6
- package/dist/neutral/index.js.map +1 -1
- package/dist/node/index.cjs +4 -10
- package/dist/node/index.cjs.map +1 -1
- package/dist/node/index.js +4 -10
- package/dist/node/index.js.map +1 -1
- package/package.json +14 -13
package/dist/browser/index.cjs
CHANGED
@@ -42,8 +42,7 @@ var MemoryPayloadDiviner = class extends import_diviner_payload_abstract.Payload
   static defaultConfigSchema = import_diviner_payload_model.PayloadDivinerConfigSchema;
   async divineHandler(payloads) {
     const filter = (0, import_assert.assertEx)(payloads?.filter(import_diviner_payload_model.isPayloadDivinerQueryPayload)?.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = (0, import_assert.assertEx)(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = (0, import_object.removeFields)(filter, [
       "schema",
@@ -52,10 +51,8 @@ var MemoryPayloadDiviner = class extends import_diviner_payload_abstract.Payload
     ]);
     let all = await archivist.all?.();
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas?.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/browser/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;ACAA,oBAAyB;AACzB,oBAA6B;AAE7B,sCAA+B;AAC/B,mCAKO;AACP,6BAA+B;AAGxB,IAAMA,uBAAN,cASGC,+CAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,aAASC,wBAASF,UAAUC,OAAOE,yDAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH
+
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;ACAA,oBAAyB;AACzB,oBAA6B;AAE7B,sCAA+B;AAC/B,mCAKO;AACP,6BAA+B;AAGxB,IAAMA,uBAAN,cASGC,+CAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,aAASC,wBAASF,UAAUC,OAAOE,yDAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,gBAAYH,wBAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,QAAUC,4BAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,MAAMT,UAAUS,MAAG;AAC9B,QAAIA,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,SAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AACZ,kBAAML,QAAOP,UAAUQ,QAAAA;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,KAASA,MAAKN,WAAWW,KAAAA;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,YAAYA,UAAUQ,QAAAA,MAAcxB,OAAAA;QACtD;MACF;AACA,YAAM6B,cAActB,SAASM,IAAIE;AACjC,YAAMe,eAAetB,UAAU;AAC/B,aAAOA,WAAWuB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,sCAAeC,UAAUrB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOuB,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AACrDuB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AAClDuB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACpB,OAAAA,MAAaA,OAAAA,EAASqB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAhB,IAAIwB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;","names":["MemoryPayloadDiviner","PayloadDiviner","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error"]}
package/dist/browser/index.js
CHANGED
@@ -18,8 +18,7 @@ var MemoryPayloadDiviner = class extends PayloadDiviner {
   static defaultConfigSchema = PayloadDivinerConfigSchema;
   async divineHandler(payloads) {
     const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = assertEx(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = removeFields(filter, [
       "schema",
@@ -28,10 +27,8 @@ var MemoryPayloadDiviner = class extends PayloadDiviner {
     ]);
     let all = await archivist.all?.();
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas?.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/browser/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,oBAAoB;AAE7B,SAASC,sBAAsB;AAC/B,SACEC,8BACAC,kCAGK;AACP,SAASC,sBAAsB;AAGxB,IAAMC,uBAAN,cASGC,eAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,SAASC,SAASF,UAAUC,OAAOE,4BAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH
+
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,oBAAoB;AAE7B,SAASC,sBAAsB;AAC/B,SACEC,8BACAC,kCAGK;AACP,SAASC,sBAAsB;AAGxB,IAAMC,uBAAN,cASGC,eAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,SAASC,SAASF,UAAUC,OAAOE,4BAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,YAAYH,SAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,IAAUC,aAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,MAAMT,UAAUS,MAAG;AAC9B,QAAIA,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,SAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AACZ,kBAAML,QAAOP,UAAUQ,QAAAA;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,KAASA,MAAKN,WAAWW,KAAAA;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,YAAYA,UAAUQ,QAAAA,MAAcxB,OAAAA;QACtD;MACF;AACA,YAAM6B,cAActB,SAASM,IAAIE;AACjC,YAAMe,eAAetB,UAAU;AAC/B,aAAOA,WAAWuB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,eAAeC,UAAUrB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOuB,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AACrDuB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AAClDuB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACpB,OAAAA,MAAaA,OAAAA,EAASqB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAhB,IAAIwB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;","names":["assertEx","removeFields","PayloadDiviner","isPayloadDivinerQueryPayload","PayloadDivinerConfigSchema","PayloadBuilder","MemoryPayloadDiviner","PayloadDiviner","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error"]}
package/dist/neutral/index.cjs
CHANGED
@@ -42,8 +42,7 @@ var MemoryPayloadDiviner = class extends import_diviner_payload_abstract.Payload
   static defaultConfigSchema = import_diviner_payload_model.PayloadDivinerConfigSchema;
   async divineHandler(payloads) {
     const filter = (0, import_assert.assertEx)(payloads?.filter(import_diviner_payload_model.isPayloadDivinerQueryPayload)?.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = (0, import_assert.assertEx)(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = (0, import_object.removeFields)(filter, [
       "schema",
@@ -52,10 +51,8 @@ var MemoryPayloadDiviner = class extends import_diviner_payload_abstract.Payload
     ]);
     let all = await archivist.all?.();
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas?.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/neutral/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;ACAA,oBAAyB;AACzB,oBAA6B;AAE7B,sCAA+B;AAC/B,mCAKO;AACP,6BAA+B;AAGxB,IAAMA,uBAAN,cASGC,+CAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,aAASC,wBAASF,UAAUC,OAAOE,yDAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH
+
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;ACAA,oBAAyB;AACzB,oBAA6B;AAE7B,sCAA+B;AAC/B,mCAKO;AACP,6BAA+B;AAGxB,IAAMA,uBAAN,cASGC,+CAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,aAASC,wBAASF,UAAUC,OAAOE,yDAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,gBAAYH,wBAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,QAAUC,4BAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,MAAMT,UAAUS,MAAG;AAC9B,QAAIA,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,SAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AACZ,kBAAML,QAAOP,UAAUQ,QAAAA;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,KAASA,MAAKN,WAAWW,KAAAA;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,YAAYA,UAAUQ,QAAAA,MAAcxB,OAAAA;QACtD;MACF;AACA,YAAM6B,cAActB,SAASM,IAAIE;AACjC,YAAMe,eAAetB,UAAU;AAC/B,aAAOA,WAAWuB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,sCAAeC,UAAUrB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOuB,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AACrDuB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AAClDuB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACpB,OAAAA,MAAaA,OAAAA,EAASqB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAhB,IAAIwB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;","names":["MemoryPayloadDiviner","PayloadDiviner","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error"]}
package/dist/neutral/index.js
CHANGED
@@ -18,8 +18,7 @@ var MemoryPayloadDiviner = class extends PayloadDiviner {
   static defaultConfigSchema = PayloadDivinerConfigSchema;
   async divineHandler(payloads) {
     const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = assertEx(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = removeFields(filter, [
       "schema",
@@ -28,10 +27,8 @@ var MemoryPayloadDiviner = class extends PayloadDiviner {
     ]);
     let all = await archivist.all?.();
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas?.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/neutral/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,oBAAoB;AAE7B,SAASC,sBAAsB;AAC/B,SACEC,8BACAC,kCAGK;AACP,SAASC,sBAAsB;AAGxB,IAAMC,uBAAN,cASGC,eAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,SAASC,SAASF,UAAUC,OAAOE,4BAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH
+
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,oBAAoB;AAE7B,SAASC,sBAAsB;AAC/B,SACEC,8BACAC,kCAGK;AACP,SAASC,sBAAsB;AAGxB,IAAMC,uBAAN,cASGC,eAAAA;EAtBV,OAsBUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EAEvD,MAAyBE,cAAcC,UAA6C;AAClF,UAAMC,SAASC,SAASF,UAAUC,OAAOE,4BAAAA,GAA+BC,IAAAA,GAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,YAAYH,SAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,IAAUC,aAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,MAAMT,UAAUS,MAAG;AAC9B,QAAIA,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,SAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AACZ,kBAAML,QAAOP,UAAUQ,QAAAA;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,KAASA,MAAKN,WAAWW,KAAAA;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,YAAYA,UAAUQ,QAAAA,MAAcxB,OAAAA;QACtD;MACF;AACA,YAAM6B,cAActB,SAASM,IAAIE;AACjC,YAAMe,eAAetB,UAAU;AAC/B,aAAOA,WAAWuB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,eAAeC,UAAUrB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOuB,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AACrDuB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASjB,SAAS,KAAKiB,SAAS,CAAA,EAAG,CAAA,MAAOvB,MAAM;AAClDuB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACpB,OAAAA,MAAaA,OAAAA,EAASqB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAhB,IAAIwB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;","names":["assertEx","removeFields","PayloadDiviner","isPayloadDivinerQueryPayload","PayloadDivinerConfigSchema","PayloadBuilder","MemoryPayloadDiviner","PayloadDiviner","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error"]}
package/dist/node/index.cjs
CHANGED
@@ -20,10 +20,7 @@ var __copyProps = (to, from, except, desc) => {
   return to;
 };
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-var __publicField = (obj, key, value) => {
-  __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
-  return value;
-};
+var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
 var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj);
 
 // src/index.ts
@@ -43,8 +40,7 @@ var _MemoryPayloadDiviner = class _MemoryPayloadDiviner extends import_diviner_p
   async divineHandler(payloads) {
     var _a, _b;
     const filter = (0, import_assert.assertEx)((_a = payloads == null ? void 0 : payloads.filter(import_diviner_payload_model.isPayloadDivinerQueryPayload)) == null ? void 0 : _a.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = (0, import_assert.assertEx)(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = (0, import_object.removeFields)(filter, [
       "schema",
@@ -53,10 +49,8 @@ var _MemoryPayloadDiviner = class _MemoryPayloadDiviner extends import_diviner_p
     ]);
     let all = await ((_b = archivist.all) == null ? void 0 : _b.call(archivist));
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas == null ? void 0 : schemas.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas == null ? void 0 : schemas.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/node/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":"
+
{"version":3,"sources":["../../src/index.ts","../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["export * from './MemoryPayloadDiviner'\n","import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;ACAA,oBAAyB;AACzB,oBAA6B;AAE7B,sCAA+B;AAC/B,mCAKO;AACP,6BAA+B;AAGxB,IAAMA,wBAAN,MAAMA,8BASHC,+CAAAA;EAIR,MAAyBC,cAAcC,UAA6C;AA1BtF;AA2BI,UAAMC,aAASC,yBAASF,0CAAUC,OAAOE,+DAAjBH,mBAAgDI,OAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,gBAAYH,wBAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,QAAUC,4BAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,QAAMT,eAAUS,QAAVT;AACjB,QAAIS,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,mCAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AA1C9B,gBAAAC;AA2CkB,kBAAMN,QAAOP,mCAAUQ;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,OAASA,MAAAA,MAAKN,aAALM,gBAAAA,IAAAA,KAAAA,OAAgBK;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,aAAYA,mCAAUQ,eAAcxB,OAAAA;QACtD;MACF;AACA,YAAM8B,cAAcvB,SAASM,IAAIE;AACjC,YAAMgB,eAAevB,UAAU;AAC/B,aAAOA,WAAWwB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,sCAAeC,UAAUtB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOwB,SAASlB,SAAS,KAAKkB,SAAS,CAAA,EAAG,CAAA,MAAOxB,MAAM;AACrDwB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASlB,SAAS,KAAKkB,SAAS,CAAA,EAAG,CAAA,MAAOxB,MAAM;AAClDwB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACrB,OAAAA,MAAaA,OAAAA,EAASsB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAjB,IAAIyB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;AAnDU1C;AACR,cAVWD,uBAUc4C,iBAA0B;KAAI,yDAAMA;EAAeC;;AAC5E,cAXW7C,uBAWc8C,uBAA8BD;AAXlD,IAAM7C,uBAAN;","names":["MemoryPayloadDiviner","PayloadDiviner","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","_a","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema"]}
package/dist/node/index.js
CHANGED
@@ -3,10 +3,7 @@ var __getProtoOf = Object.getPrototypeOf;
 var __reflectGet = Reflect.get;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
 var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
-var __publicField = (obj, key, value) => {
-  __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
-  return value;
-};
+var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
 var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj);
 
 // src/MemoryPayloadDiviner.ts
@@ -19,8 +16,7 @@ var _MemoryPayloadDiviner = class _MemoryPayloadDiviner extends PayloadDiviner {
   async divineHandler(payloads) {
     var _a, _b;
     const filter = assertEx((_a = payloads == null ? void 0 : payloads.filter(isPayloadDivinerQueryPayload)) == null ? void 0 : _a.pop(), () => "Missing query payload");
-    if (!filter)
-      return [];
+    if (!filter) return [];
     const archivist = assertEx(await this.archivistInstance(), () => "Unable to resolve archivist");
     const { schemas, limit, offset, hash, order = "desc", ...props } = removeFields(filter, [
       "schema",
@@ -29,10 +25,8 @@ var _MemoryPayloadDiviner = class _MemoryPayloadDiviner extends PayloadDiviner {
     ]);
     let all = await ((_b = archivist.all) == null ? void 0 : _b.call(archivist));
     if (all) {
-      if (order === "desc")
-        all = all.reverse();
-      if (schemas == null ? void 0 : schemas.length)
-        all = all.filter((payload) => schemas.includes(payload.schema));
+      if (order === "desc") all = all.reverse();
+      if (schemas == null ? void 0 : schemas.length) all = all.filter((payload) => schemas.includes(payload.schema));
       if (Object.keys(props).length > 0) {
         const additionalFilterCriteria = Object.entries(props);
         for (const [prop, filter2] of additionalFilterCriteria) {
package/dist/node/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":"
+
{"version":3,"sources":["../../src/MemoryPayloadDiviner.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { removeFields } from '@xylabs/object'\nimport { DivinerInstance, DivinerModuleEventData } from '@xyo-network/diviner-model'\nimport { PayloadDiviner } from '@xyo-network/diviner-payload-abstract'\nimport {\n isPayloadDivinerQueryPayload,\n PayloadDivinerConfigSchema,\n PayloadDivinerParams,\n PayloadDivinerQueryPayload,\n} from '@xyo-network/diviner-payload-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport { Payload, Schema, WithMeta } from '@xyo-network/payload-model'\n\nexport class MemoryPayloadDiviner<\n TParams extends PayloadDivinerParams = PayloadDivinerParams,\n TIn extends PayloadDivinerQueryPayload = PayloadDivinerQueryPayload,\n TOut extends Payload = Payload,\n TEventData extends DivinerModuleEventData<DivinerInstance<TParams, TIn, TOut>, TIn, TOut> = DivinerModuleEventData<\n DivinerInstance<TParams, TIn, TOut>,\n TIn,\n TOut\n >,\n> extends PayloadDiviner<TParams, TIn, TOut, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, PayloadDivinerConfigSchema]\n static override readonly defaultConfigSchema: Schema = PayloadDivinerConfigSchema\n\n protected override async divineHandler(payloads?: TIn[]): Promise<WithMeta<TOut>[]> {\n const filter = assertEx(payloads?.filter(isPayloadDivinerQueryPayload)?.pop(), () => 'Missing query payload')\n if (!filter) return []\n const archivist = assertEx(await this.archivistInstance(), () => 'Unable to resolve archivist')\n const { schemas, limit, offset, hash, order = 'desc', ...props } = removeFields(filter as WithMeta<TIn>, ['schema', '$meta', '$hash'])\n let all = (await archivist.all?.()) as WithMeta<TOut>[]\n if (all) {\n if (order === 'desc') all = all.reverse()\n if (schemas?.length) all = all.filter((payload) => schemas.includes(payload.schema))\n if (Object.keys(props).length > 0) {\n const additionalFilterCriteria = Object.entries(props)\n for (const [prop, filter] of additionalFilterCriteria) {\n const property = prop as keyof TOut\n all =\n Array.isArray(filter) ?\n all.filter((payload) =>\n filter.every((value) => {\n const prop = payload?.[property]\n //TODO: This seems to be written just to check arrays, and now that $meta is there, need to check type?\n return Array.isArray(prop) && prop.includes?.(value)\n }),\n )\n : all.filter((payload) => payload?.[property] === filter)\n }\n }\n const parsedLimit = limit ?? 
all.length\n const parsedOffset = offset || 0\n return offset === undefined ?\n (async () => {\n const allPairs = await PayloadBuilder.hashPairs(all)\n if (hash) {\n //remove all until found\n while (allPairs.length > 0 && allPairs[0][1] !== hash) {\n allPairs.shift()\n }\n //remove it if found\n if (allPairs.length > 0 && allPairs[0][1] === hash) {\n allPairs.shift()\n }\n }\n return allPairs.map(([payload]) => payload).slice(parsedOffset, parsedOffset + parsedLimit)\n })()\n : all.slice(parsedOffset, parsedOffset + parsedLimit)\n } else {\n throw new Error('Archivist does not support \"all\"')\n }\n }\n}\n"],"mappings":";;;;;;;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,oBAAoB;AAE7B,SAASC,sBAAsB;AAC/B,SACEC,8BACAC,kCAGK;AACP,SAASC,sBAAsB;AAGxB,IAAMC,wBAAN,MAAMA,8BASHC,eAAAA;EAIR,MAAyBC,cAAcC,UAA6C;AA1BtF;AA2BI,UAAMC,SAASC,UAASF,0CAAUC,OAAOE,kCAAjBH,mBAAgDI,OAAO,MAAM,uBAAA;AACrF,QAAI,CAACH,OAAQ,QAAO,CAAA;AACpB,UAAMI,YAAYH,SAAS,MAAM,KAAKI,kBAAiB,GAAI,MAAM,6BAAA;AACjE,UAAM,EAAEC,SAASC,OAAOC,QAAQC,MAAMC,QAAQ,QAAQ,GAAGC,MAAAA,IAAUC,aAAaZ,QAAyB;MAAC;MAAU;MAAS;KAAQ;AACrI,QAAIa,MAAO,QAAMT,eAAUS,QAAVT;AACjB,QAAIS,KAAK;AACP,UAAIH,UAAU,OAAQG,OAAMA,IAAIC,QAAO;AACvC,UAAIR,mCAASS,OAAQF,OAAMA,IAAIb,OAAO,CAACgB,YAAYV,QAAQW,SAASD,QAAQE,MAAM,CAAA;AAClF,UAAIC,OAAOC,KAAKT,KAAAA,EAAOI,SAAS,GAAG;AACjC,cAAMM,2BAA2BF,OAAOG,QAAQX,KAAAA;AAChD,mBAAW,CAACY,MAAMvB,OAAAA,KAAWqB,0BAA0B;AACrD,gBAAMG,WAAWD;AACjBV,gBACEY,MAAMC,QAAQ1B,OAAAA,IACZa,IAAIb,OAAO,CAACgB,YACVhB,QAAO2B,MAAM,CAACC,UAAAA;AA1C9B,gBAAAC;AA2CkB,kBAAMN,QAAOP,mCAAUQ;AAEvB,mBAAOC,MAAMC,QAAQH,KAAAA,OAASA,MAAAA,MAAKN,aAALM,gBAAAA,IAAAA,KAAAA,OAAgBK;UAChD,CAAA,CAAA,IAEFf,IAAIb,OAAO,CAACgB,aAAYA,mCAAUQ,eAAcxB,OAAAA;QACtD;MACF;AACA,YAAM8B,cAAcvB,SAASM,IAAIE;AACjC,YAAMgB,eAAevB,UAAU;AAC/B,aAAOA,WAAWwB,UACb,YAAA;AACC,cAAMC,WAAW,MAAMC,eAAeC,UAAUtB,GAAAA;AAChD,YAAIJ,MAAM;AAER,iBAAOwB,SAASlB,SAAS,KAAKkB,SAAS,CAAA,EAAG,CAAA,MAAOxB,MAAM;AACrDwB,qBAASG,MAAK;UAChB;AAEA,cAAIH,SAASlB,SAAS,KAAKkB,SAAS,CAAA,EAAG,CAAA,MAAOxB,MAAM;AAClDwB,qBAASG,MAAK;UAChB;QACF;AACA,eAAOH,SAASI,IAAI,CAAC,CAACrB,OAAAA,MAAaA,OAAAA,EAASsB,MAAMP,cAAcA,eAAeD,WAAAA;MACjF,GAAA,IACAjB,IAAIyB,MAAMP,cAAcA,eAAeD,WAAAA;IAC7C,OAAO;AACL,YAAM,IAAIS,MAAM,kCAAA;IAClB;EACF;AACF;AAnDU1C;AACR,cAVWD,uBAUc4C,iBAA0B;KAAI,yDAAMA;EAAeC;;AAC5E,cAXW7C,uBAWc8C,uBAA8BD;AAXlD,IAAM7C,uBAAN;","names":["assertEx","removeFields","PayloadDiviner","isPayloadDivinerQueryPayload","PayloadDivinerConfigSchema","PayloadBuilder","MemoryPayloadDiviner","PayloadDiviner","divineHandler","payloads","filter","assertEx","isPayloadDivinerQueryPayload","pop","archivist","archivistInstance","schemas","limit","offset","hash","order","props","removeFields","all","reverse","length","payload","includes","schema","Object","keys","additionalFilterCriteria","entries","prop","property","Array","isArray","every","value","_a","parsedLimit","parsedOffset","undefined","allPairs","PayloadBuilder","hashPairs","shift","map","slice","Error","configSchemas","PayloadDivinerConfigSchema","defaultConfigSchema"]}
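Aside from the mapping offsets, the sourcesContent embedded in this map shows how the diviner pages results: when no offset is supplied, the handler hashes all payloads and, if a hash cursor is given, discards everything up to and including the matching entry before applying the limit. The sketch below restates that cursor behaviour; the Hashed tuple and sliceAfterHash helper are illustrative, and hashing is assumed to happen elsewhere (PayloadBuilder.hashPairs in the real module).

// Illustrative sketch of the hash-cursor slicing seen in the embedded source above.
type Hashed<T> = [payload: T, hash: string]

const sliceAfterHash = <T>(pairs: Hashed<T>[], hash: string | undefined, limit?: number): T[] => {
  const remaining = [...pairs]
  if (hash) {
    // Drop everything up to the entry with the matching hash...
    while (remaining.length > 0 && remaining[0][1] !== hash) remaining.shift()
    // ...then drop that entry itself, so results start after the cursor
    if (remaining.length > 0 && remaining[0][1] === hash) remaining.shift()
  }
  return remaining.map(([payload]) => payload).slice(0, limit ?? remaining.length)
}

// Usage with pre-computed hashes (normally produced by the payload builder)
const pairs: Hashed<{ schema: string }>[] = [
  [{ schema: 'network.xyo.a' }, 'hashA'],
  [{ schema: 'network.xyo.b' }, 'hashB'],
  [{ schema: 'network.xyo.c' }, 'hashC'],
]
console.log(sliceAfterHash(pairs, 'hashA', 1)) // -> [{ schema: 'network.xyo.b' }]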
package/package.json
CHANGED
@@ -12,19 +12,19 @@
"dependencies": {
"@xylabs/assert": "^3.5.1",
"@xylabs/object": "^3.5.1",
- "@xyo-network/diviner-model": "~2.
- "@xyo-network/diviner-payload-abstract": "~2.
- "@xyo-network/diviner-payload-model": "~2.
- "@xyo-network/payload-builder": "~2.
- "@xyo-network/payload-model": "~2.
+ "@xyo-network/diviner-model": "~2.105.0-rc.1",
+ "@xyo-network/diviner-payload-abstract": "~2.105.0-rc.1",
+ "@xyo-network/diviner-payload-model": "~2.105.0-rc.1",
+ "@xyo-network/payload-builder": "~2.105.0-rc.1",
+ "@xyo-network/payload-model": "~2.105.0-rc.1"
},
"devDependencies": {
- "@xylabs/ts-scripts-yarn3": "^3.
- "@xylabs/tsconfig": "^3.
- "@xyo-network/account": "~2.
- "@xyo-network/archivist-memory": "~2.
- "@xyo-network/node-memory": "~2.
- "@xyo-network/payload-builder": "~2.
+ "@xylabs/ts-scripts-yarn3": "^3.11.2",
+ "@xylabs/tsconfig": "^3.11.2",
+ "@xyo-network/account": "~2.105.0-rc.1",
+ "@xyo-network/archivist-memory": "~2.105.0-rc.1",
+ "@xyo-network/node-memory": "~2.105.0-rc.1",
+ "@xyo-network/payload-builder": "~2.105.0-rc.1",
"typescript": "^5.4.5"
},
"description": "Primary SDK for using XYO Protocol 2.0",
@@ -66,6 +66,7 @@
"url": "https://github.com/XYOracleNetwork/sdk-xyo-client-js.git"
},
"sideEffects": false,
- "version": "2.
- "type": "module"
+ "version": "2.105.0-rc.1",
+ "type": "module",
+ "stableVersion": "2.104.1"
}
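The workspace dependencies all move to ~2.105.0-rc.1 ranges. Tilde ranges that carry a prerelease tag resolve a little differently from plain ones: prerelease versions only satisfy the range when they share the same major.minor.patch as the range's anchor, while stable versions behave like a normal tilde range. A quick check, assuming the semver package is available (it is not a dependency of this package, purely illustrative):

// How the new ~2.105.0-rc.1 ranges resolve, using the semver package's satisfies().
import { satisfies } from 'semver'

const range = '~2.105.0-rc.1'

console.log(satisfies('2.105.0-rc.1', range)) // true  - the anchor itself
console.log(satisfies('2.105.0-rc.2', range)) // true  - later prerelease of the same 2.105.0
console.log(satisfies('2.105.0', range))      // true  - the eventual stable 2.105.0
console.log(satisfies('2.105.1', range))      // true  - stable patch bumps stay inside the tilde range
console.log(satisfies('2.105.1-rc.1', range)) // false - prerelease of a different patch version
console.log(satisfies('2.106.0', range))      // false - outside the tilde range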