@conduit-client/service-cache-inclusion-policy 2.0.1 → 3.0.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
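For orientation, below is a minimal usage sketch of the v1 API that appears in the `sourcesContent` embedded in the diffed source map. It is not part of the package diff: the `/v1` import subpath is an assumption (mirroring the sibling `@conduit-client/service-cache/v1` entry point), and only builders and types visible in the embedded sources are used.

```ts
// Illustrative sketch only; import subpaths are assumed, not confirmed by this diff.
import { buildServiceDescriptor } from '@conduit-client/service-cache/v1';
import { buildInMemoryCacheInclusionPolicyService } from '@conduit-client/service-cache-inclusion-policy/v1';

async function invalidateUserEntries(): Promise<void> {
  // A plain in-memory L1 cache from the cache service package.
  const cache = buildServiceDescriptor().service;

  // Wrap it in the single-level, in-memory cache inclusion policy.
  const { service: policy } = buildInMemoryCacheInclusionPolicyService(cache);

  // Find every entry whose key matches the regex and invalidate it
  // (the 'invalidate' update rewrites the cacheControl metadata with maxAge: 0).
  const query = { key: { $regex: /^user::/ } };
  for await (const key of policy.findAndModify(query, { type: 'invalidate' })) {
    console.log(`invalidated ${key}`);
  }
}
```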
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../../src/v1/cache-inclusion-policy.ts","../../../../utils/dist/index.js","../../../cache/dist/v1/index.js","../../src/v1/durable-cache-inclusion-policy.ts","../../src/v1/cache-query.ts","../../src/v1/cache-update.ts","../../src/v1/in-memory-cache-inclusion-policy.ts"],"sourcesContent":["import { type SyncOrAsync, type Result } from '@conduit-client/utils';\nimport type { DeepReadonly, NamedService, ServiceDescriptor } from '@conduit-client/utils';\nimport type { CacheEntry, Key, Cache } from '@conduit-client/service-cache/v1';\nimport { CacheQuery } from './cache-query';\nimport { CacheUpdate } from './cache-update';\n\n/**\n * CacheInclusionPolicy is an interface for accessing the cache\n * and synchronizing the cache data with another external cache.\n *\n * https://en.wikipedia.org/wiki/Cache_inclusion_policy\n */\nexport abstract class CacheInclusionPolicyService {\n /**\n * Reads data out of a cache applying a multilevel cache inclusion policy to the results.\n *\n * @param {Cache} l1 The cache to use for L1 cache reads.\n * @param {(cache: Cache) => SyncOrAsync<D>} readFromL1 A function for reading the data from L1.\n * The readFromL1 function returns a boolean indicating whether the completed read operation\n * was successful.\n * @returns {SyncOrAsync<D>}\n */\n abstract read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D>;\n\n /**\n * Write data out to a cache applying a multilevel cache inclusion policy to the writes.\n *\n * @param {Cache} l1 The cache to use for L1 cache writes.\n * @param {(l1: Cache) => SyncOrAsync<void>} writeToL1 A function for writing the data to L1.\n * @returns {SyncOrAsync<void>}\n */\n abstract write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D>;\n\n /**\n * Finds cache entries that match the given query.\n * Returns an async generator that yields `[key, entry]`.\n */\n abstract find(\n query: CacheQuery\n ): AsyncGenerator<[key: Key, value: DeepReadonly<CacheEntry<unknown>>], void, unknown>;\n\n /**\n * Finds and modifies cache entries that match the given query.\n * Extends `find(query)` and returns an async generator of modified keys.\n */\n abstract findAndModify(\n query: CacheQuery,\n cacheUpdate: CacheUpdate\n ): AsyncGenerator<Key, void, unknown>;\n}\n\nexport type NamedCacheInclusionPolicyService<Name extends string = 'cacheInclusionPolicy'> =\n NamedService<Name, CacheInclusionPolicyService>;\n\nexport type CacheInclusionPolicyServiceDescriptor = ServiceDescriptor<\n CacheInclusionPolicyService,\n 'cacheInclusionPolicy',\n '1.0'\n>;\n","/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\nfunction bfs(start, predicate, getChildren) {\n const queue = [...start];\n const visited = /* @__PURE__ */ new Set([...start]);\n const matches2 = /* @__PURE__ */ new Set();\n while (queue.length) {\n const curr = queue.shift();\n if (predicate(curr)) {\n matches2.add(curr);\n }\n const children = getChildren(curr);\n for (const child of children) {\n if (!visited.has(child)) {\n visited.add(child);\n queue.push(child);\n }\n }\n }\n return matches2;\n}\nfunction lineFormatter(position, message, filePath) {\n return `${message} (${filePath}:${position.line}:${position.column})`;\n}\nclass DefaultFileParserLogger {\n constructor(services, filePath) {\n this.services = services;\n 
this.filePath = filePath;\n }\n trace(position, message) {\n this.services.logger.trace(this.format(position, message));\n }\n debug(position, message) {\n this.services.logger.debug(this.format(position, message));\n }\n info(position, message) {\n this.services.logger.info(this.format(position, message));\n }\n warn(position, message) {\n this.services.logger.warn(this.format(position, message));\n }\n error(position, message) {\n this.services.logger.error(this.format(position, message));\n }\n format(position, message) {\n return lineFormatter(position, message, this.filePath);\n }\n}\nfunction matches(test, s) {\n if (test === void 0) {\n return false;\n } else if (typeof test === \"string\") {\n return s === test;\n } else if (test instanceof RegExp) {\n return test.test(s);\n } else if (typeof test === \"function\") {\n return test(s);\n }\n return test.some((m) => matches(m, s));\n}\nfunction includes(incexc, s) {\n if (matches(incexc.exclude, s)) {\n return false;\n }\n if (matches(incexc.include, s)) {\n return true;\n }\n if (incexc.include) {\n return false;\n }\n return true;\n}\nconst { create, freeze, keys, entries } = Object;\nconst { hasOwnProperty } = Object.prototype;\nconst { isArray } = Array;\nconst { push, indexOf, slice } = Array.prototype;\nconst { stringify, parse } = JSON;\nconst WeakSetConstructor = WeakSet;\nconst LogLevelMap = {\n TRACE: 4,\n DEBUG: 3,\n INFO: 2,\n WARN: 1,\n ERROR: 0\n};\nclass ConsoleLogger {\n constructor(level = \"WARN\", printer = console.log, formatter = (level2, message) => `${level2}: ${message}`) {\n this.level = level;\n this.printer = printer;\n this.formatter = formatter;\n this.messages = [];\n }\n trace(message) {\n this.log(\"TRACE\", message);\n }\n debug(message) {\n this.log(\"DEBUG\", message);\n }\n info(message) {\n this.log(\"INFO\", message);\n }\n warn(message) {\n this.log(\"WARN\", message);\n }\n error(message) {\n this.log(\"ERROR\", message);\n }\n log(level, message) {\n if (LogLevelMap[level] > LogLevelMap[this.level]) {\n return;\n }\n this.printer(this.formatter(level, message));\n }\n}\nfunction loggerService(level, printer, formatter) {\n return new ConsoleLogger(level, printer, formatter);\n}\nclass Ok {\n constructor(value) {\n this.value = value;\n }\n isOk() {\n return true;\n }\n isErr() {\n return !this.isOk();\n }\n}\nclass Err {\n constructor(error) {\n this.error = error;\n }\n isOk() {\n return false;\n }\n isErr() {\n return !this.isOk();\n }\n}\nconst ok = (value) => new Ok(value);\nconst err = (err2) => new Err(err2);\nclass DataNotFoundError extends Error {\n constructor(message) {\n super(message);\n this.name = \"DataNotFoundError\";\n }\n}\nclass DataIncompleteError extends Error {\n constructor(message, partialData) {\n super(message);\n this.partialData = partialData;\n this.name = \"DataIncompleteError\";\n }\n}\nfunction isDataNotFoundError(error) {\n return error instanceof DataNotFoundError || error.name === \"DataNotFoundError\";\n}\nfunction isDataIncompleteError(error) {\n return error instanceof DataIncompleteError || error.name === \"DataIncompleteError\";\n}\nfunction isCacheHitOrError(value) {\n if (value.isErr() && (isDataIncompleteError(value.error) || isDataNotFoundError(value.error))) {\n return false;\n }\n return true;\n}\nfunction isCacheMiss(value) {\n return !isCacheHitOrError(value);\n}\nfunction isResult(value) {\n return value != null && typeof value === \"object\" && \"isOk\" in value && \"isErr\" in value && typeof value.isOk === \"function\" && typeof value.isErr 
=== \"function\" && (value.isOk() === true && value.isErr() === false && \"value\" in value || value.isOk() === false && value.isErr() === true && \"error\" in value);\n}\nfunction setOverlaps(setA, setB) {\n for (const element of setA) {\n if (setB.has(element)) {\n return true;\n }\n }\n return false;\n}\nfunction setDifference(setA, setB) {\n const differenceSet = /* @__PURE__ */ new Set();\n for (const element of setA) {\n if (!setB.has(element)) {\n differenceSet.add(element);\n }\n }\n return differenceSet;\n}\nfunction addAllToSet(targetSet, sourceSet) {\n for (const element of sourceSet) {\n targetSet.add(element);\n }\n}\nconst toTypeScriptSafeIdentifier = (s) => s.length >= 1 ? s[0].replace(/[^$_\\p{ID_Start}]/u, \"_\") + s.slice(1).replace(/[^$\\u200c\\u200d\\p{ID_Continue}]/gu, \"_\") : \"\";\nfunction isSubscribable(obj) {\n return typeof obj === \"object\" && obj !== null && \"subscribe\" in obj && typeof obj.subscribe === \"function\" && \"refresh\" in obj && typeof obj.refresh === \"function\";\n}\nfunction isSubscribableResult(x) {\n if (!isResult(x)) {\n return false;\n }\n return isSubscribable(x.isOk() ? x.value : x.error);\n}\nfunction buildSubscribableResult(result, subscribe, refresh) {\n if (result.isOk()) {\n return ok({ data: result.value, subscribe, refresh });\n } else {\n return err({ failure: result.error, subscribe, refresh });\n }\n}\nfunction resolvedPromiseLike(result) {\n if (isPromiseLike(result)) {\n return result.then((nextResult) => nextResult);\n }\n return {\n then: (onFulfilled, _onRejected) => {\n try {\n return resolvedPromiseLike(onFulfilled(result));\n } catch (e) {\n if (onFulfilled === void 0) {\n return resolvedPromiseLike(result);\n }\n return rejectedPromiseLike(e);\n }\n }\n };\n}\nfunction rejectedPromiseLike(reason) {\n if (isPromiseLike(reason)) {\n return reason.then((nextResult) => nextResult);\n }\n return {\n then: (_onFulfilled, onRejected) => {\n if (typeof onRejected === \"function\") {\n try {\n return resolvedPromiseLike(onRejected(reason));\n } catch (e) {\n return rejectedPromiseLike(e);\n }\n }\n return rejectedPromiseLike(reason);\n }\n };\n}\nfunction isPromiseLike(x) {\n return typeof (x == null ? 
void 0 : x.then) === \"function\";\n}\nfunction racesync(values) {\n for (const value of values) {\n let settled = void 0;\n if (isPromiseLike(value)) {\n value.then(\n (_) => {\n settled = value;\n },\n (_) => {\n settled = value;\n }\n );\n } else {\n settled = resolvedPromiseLike(value);\n }\n if (settled !== void 0) {\n return settled;\n }\n }\n return Promise.race(values);\n}\nfunction withResolvers() {\n let resolve, reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n}\nfunction deepEquals(x, y) {\n if (x === void 0) {\n return y === void 0;\n } else if (x === null) {\n return y === null;\n } else if (y === null) {\n return x === null;\n } else if (isArray(x)) {\n if (!isArray(y) || x.length !== y.length) {\n return false;\n }\n for (let i = 0; i < x.length; ++i) {\n if (!deepEquals(x[i], y[i])) {\n return false;\n }\n }\n return true;\n } else if (typeof x === \"object\") {\n if (typeof y !== \"object\") {\n return false;\n }\n const xkeys = Object.keys(x);\n const ykeys = Object.keys(y);\n if (xkeys.length !== ykeys.length) {\n return false;\n }\n for (let i = 0; i < xkeys.length; ++i) {\n const key = xkeys[i];\n if (!deepEquals(x[key], y[key])) {\n return false;\n }\n }\n return true;\n }\n return x === y;\n}\nfunction stableJSONStringify(node) {\n if (node && node.toJSON && typeof node.toJSON === \"function\") {\n node = node.toJSON();\n }\n if (node === void 0) {\n return;\n }\n if (typeof node === \"number\") {\n return isFinite(node) ? \"\" + node : \"null\";\n }\n if (typeof node !== \"object\") {\n return stringify(node);\n }\n let i;\n let out;\n if (isArray(node)) {\n out = \"[\";\n for (i = 0; i < node.length; i++) {\n if (i) {\n out += \",\";\n }\n out += stableJSONStringify(node[i]) || \"null\";\n }\n return out + \"]\";\n }\n if (node === null) {\n return \"null\";\n }\n const objKeys = keys(node).sort();\n out = \"\";\n for (i = 0; i < objKeys.length; i++) {\n const key = objKeys[i];\n const value = stableJSONStringify(node[key]);\n if (!value) {\n continue;\n }\n if (out) {\n out += \",\";\n }\n out += stringify(key) + \":\" + value;\n }\n return \"{\" + out + \"}\";\n}\nfunction toError(x) {\n if (x instanceof Error) {\n return x;\n }\n return new Error(typeof x === \"string\" ? x : JSON.stringify(x));\n}\nfunction deepCopy(x) {\n const stringified = stringify(x);\n return stringified ? 
parse(stringified) : void 0;\n}\nfunction readableStreamToAsyncIterable(stream) {\n if (stream.locked) {\n return err(new Error(\"ReadableStream is already locked\"));\n }\n if (Symbol.asyncIterator in stream) {\n return ok(stream);\n }\n const reader = stream.getReader();\n return ok({\n [Symbol.asyncIterator]: () => ({\n next: async () => {\n try {\n const result = await reader.read();\n if (result.done) {\n try {\n reader.releaseLock();\n } catch {\n }\n return { done: true, value: void 0 };\n }\n return {\n done: false,\n value: result.value\n };\n } catch (e) {\n try {\n reader.releaseLock();\n } catch {\n }\n throw e;\n }\n },\n return: async (value) => {\n try {\n await reader.cancel();\n } catch {\n }\n try {\n reader.releaseLock();\n } catch {\n }\n return { done: true, value };\n },\n throw: async (exception) => {\n try {\n await reader.cancel();\n } catch {\n }\n try {\n reader.releaseLock();\n } catch {\n }\n throw exception;\n }\n })\n });\n}\nfunction satisfies(provided, requested) {\n const providedN = provided.split(\".\").map((s) => parseInt(s));\n const requestedN = requested.split(\".\").map((s) => parseInt(s));\n return providedN[0] === requestedN[0] && providedN[1] >= requestedN[1];\n}\nfunction stringIsVersion(s) {\n const versionParts = s.split(\".\");\n return (versionParts.length === 2 || versionParts.length === 3) && versionParts.every((part) => part.match(/^\\d+$/));\n}\nvar HttpStatusCode = /* @__PURE__ */ ((HttpStatusCode2) => {\n HttpStatusCode2[HttpStatusCode2[\"Ok\"] = 200] = \"Ok\";\n HttpStatusCode2[HttpStatusCode2[\"Created\"] = 201] = \"Created\";\n HttpStatusCode2[HttpStatusCode2[\"NoContent\"] = 204] = \"NoContent\";\n HttpStatusCode2[HttpStatusCode2[\"NotModified\"] = 304] = \"NotModified\";\n HttpStatusCode2[HttpStatusCode2[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpStatusCode2[HttpStatusCode2[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpStatusCode2[HttpStatusCode2[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpStatusCode2[HttpStatusCode2[\"NotFound\"] = 404] = \"NotFound\";\n HttpStatusCode2[HttpStatusCode2[\"ServerError\"] = 500] = \"ServerError\";\n HttpStatusCode2[HttpStatusCode2[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n return HttpStatusCode2;\n})(HttpStatusCode || {});\nfunction getFetchResponseFromAuraError(err2) {\n if (err2.data !== void 0 && err2.data.statusCode !== void 0) {\n let data = {};\n data = err2.data;\n if (err2.id !== void 0) {\n data.id = err2.id;\n }\n return new FetchResponse(data.statusCode, data);\n }\n return new FetchResponse(500, {\n error: err2.message\n });\n}\nasync function coerceResponseToFetchResponse(response) {\n const { status } = response;\n const responseHeaders = {};\n response.headers.forEach((value, key) => {\n responseHeaders[key] = value;\n });\n let responseBody = null;\n if (status !== 204) {\n const contentType = responseHeaders[\"content-type\"];\n responseBody = contentType && contentType.startsWith(\"application/json\") ? 
await response.json() : await response.text();\n }\n return new FetchResponse(status, responseBody, responseHeaders);\n}\nfunction getStatusText(status) {\n switch (status) {\n case 200:\n return \"OK\";\n case 201:\n return \"Created\";\n case 304:\n return \"Not Modified\";\n case 400:\n return \"Bad Request\";\n case 404:\n return \"Not Found\";\n case 500:\n return \"Server Error\";\n default:\n return `Unexpected HTTP Status Code: ${status}`;\n }\n}\nclass FetchResponse extends Error {\n constructor(status, body, headers) {\n super();\n this.status = status;\n this.body = body;\n this.headers = headers || {};\n this.ok = status >= 200 && this.status <= 299;\n this.statusText = getStatusText(status);\n }\n}\nconst deeplyFrozen = new WeakSetConstructor();\nfunction deepFreeze(value) {\n if (typeof value !== \"object\" || value === null || deeplyFrozen.has(value)) {\n return;\n }\n deeplyFrozen.add(value);\n if (isArray(value)) {\n for (let i = 0, len = value.length; i < len; i += 1) {\n deepFreeze(value[i]);\n }\n } else {\n const keys$1 = keys(value);\n for (let i = 0, len = keys$1.length; i < len; i += 1) {\n deepFreeze(value[keys$1[i]]);\n }\n }\n freeze(value);\n}\nfunction isScalar(value) {\n return typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\" || value === null || value === void 0;\n}\nfunction isScalarObject(value) {\n return Object.values(value).every((value2) => isScalar(value2));\n}\nfunction isScalarArray(value) {\n return value.every((item) => isScalar(item));\n}\nfunction encodeQueryParam(paramName, value, explode) {\n switch (typeof value) {\n case \"string\":\n return [`${paramName}=${encodeURIComponent(value)}`];\n case \"number\":\n case \"boolean\":\n return [`${paramName}=${value}`];\n case \"object\":\n if (value === null) {\n return [];\n }\n if (isArray(value)) {\n if (!isScalarArray(value)) {\n throw new Error(`Unsupported non-scalar array type for ${paramName}`);\n }\n if (explode) {\n return value.map(\n (item) => `${paramName}=${item ? encodeURIComponent(item) : item}`\n );\n }\n return [\n `${paramName}=${value.map((item) => item ? encodeURIComponent(item) : item).join(\",\")}`\n ];\n }\n if (!isScalarObject(value)) {\n throw new Error(`Unsupported non-scalar object type for ${paramName}`);\n }\n if (explode) {\n return entries(value).map(\n ([key, value2]) => `${key}=${value2 ? encodeURIComponent(value2) : value2}`\n );\n }\n return [\n `${paramName}=${entries(value).flat().map((item) => item ? 
encodeURIComponent(item) : item).join(\",\")}`\n ];\n default:\n return [];\n }\n}\nclass InternalError extends Error {\n constructor(data) {\n super();\n this.data = data;\n this.type = \"internal\";\n }\n}\nclass UserVisibleError extends Error {\n constructor(data) {\n super();\n this.data = data;\n this.type = \"user-visible\";\n }\n}\nfunction applyDecorators(baseCommand, decorators, options) {\n if (!decorators || decorators.length === 0) {\n return baseCommand;\n }\n return decorators.reduce((command, decorator) => decorator(command, options), baseCommand);\n}\nexport {\n isArray as ArrayIsArray,\n indexOf as ArrayPrototypeIndexOf,\n push as ArrayPrototypePush,\n slice as ArrayPrototypeSlice,\n ConsoleLogger,\n DataIncompleteError,\n DataNotFoundError,\n DefaultFileParserLogger,\n Err,\n FetchResponse,\n HttpStatusCode,\n InternalError,\n parse as JSONParse,\n stringify as JSONStringify,\n LogLevelMap,\n create as ObjectCreate,\n entries as ObjectEntries,\n freeze as ObjectFreeze,\n keys as ObjectKeys,\n hasOwnProperty as ObjectPrototypeHasOwnProperty,\n Ok,\n UserVisibleError,\n WeakSetConstructor,\n addAllToSet,\n applyDecorators,\n bfs,\n buildSubscribableResult,\n coerceResponseToFetchResponse,\n deepCopy,\n deepEquals,\n deepFreeze,\n encodeQueryParam,\n err,\n getFetchResponseFromAuraError,\n includes,\n isCacheHitOrError,\n isCacheMiss,\n isDataIncompleteError,\n isDataNotFoundError,\n isPromiseLike,\n isResult,\n isSubscribable,\n isSubscribableResult,\n lineFormatter,\n loggerService,\n ok,\n racesync,\n readableStreamToAsyncIterable,\n rejectedPromiseLike,\n resolvedPromiseLike,\n satisfies,\n setDifference,\n setOverlaps,\n stableJSONStringify,\n stringIsVersion,\n toError,\n toTypeScriptSafeIdentifier,\n withResolvers\n};\n//# sourceMappingURL=index.js.map\n","/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\n/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\nconst { stringify, parse } = JSON;\nfunction deepCopy(x) {\n const stringified = stringify(x);\n return stringified ? parse(stringified) : void 0;\n}\nclass DefaultRecordableCache {\n constructor(baseCache) {\n this.baseCache = baseCache;\n this.keysRead = /* @__PURE__ */ new Set();\n this.missingKeysRead = /* @__PURE__ */ new Set();\n this.keysUpdated = /* @__PURE__ */ new Set();\n this.metadataKeysUpdated = /* @__PURE__ */ new Set();\n }\n delete(key) {\n this.keysUpdated.add(key);\n this.baseCache.delete(key);\n }\n get(key, options) {\n this.keysRead.add(key);\n const value = this.baseCache.get(key);\n if (value === void 0) {\n this.missingKeysRead.add(key);\n }\n if (options == null ? 
void 0 : options.copy) {\n return deepCopy(value);\n }\n return value;\n }\n set(key, value) {\n this.keysUpdated.add(key);\n this.metadataKeysUpdated.add(key);\n this.baseCache.set(key, value);\n }\n setMetadata(key, cacheControlMetadata) {\n this.metadataKeysUpdated.add(key);\n this.baseCache.setMetadata(key, cacheControlMetadata);\n }\n length() {\n return this.baseCache.length();\n }\n keys() {\n return this.baseCache.keys();\n }\n entries() {\n return this.baseCache.entries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass DefaultFilteredCache {\n constructor(baseCache, predicate) {\n this.baseCache = baseCache;\n this.predicate = predicate;\n }\n delete(key) {\n this.baseCache.delete(key);\n }\n get(key, options) {\n const result = this.baseCache.get(key);\n if (result && this.predicate(key, result)) {\n if (options == null ? void 0 : options.copy) {\n return deepCopy(result);\n }\n return result;\n }\n return void 0;\n }\n set(key, value) {\n this.baseCache.set(key, value);\n }\n setMetadata(key, cacheControlMetadata) {\n this.baseCache.setMetadata(key, cacheControlMetadata);\n }\n length() {\n return this.getFilteredKeys().size;\n }\n keys() {\n return this.getFilteredKeys();\n }\n entries() {\n return this.getFilteredEntries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n getFilteredEntries() {\n return this.baseCache.entries().filter(([key, _value]) => {\n return this.get(key);\n });\n }\n getFilteredKeys() {\n const filteredKeySet = /* @__PURE__ */ new Set();\n this.baseCache.keys().forEach((key) => {\n if (this.get(key)) {\n filteredKeySet.add(key);\n }\n });\n return filteredKeySet;\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass FixedTimeWritableCache {\n constructor(baseCache, generatedTime) {\n this.baseCache = baseCache;\n this.generatedTime = generatedTime;\n }\n delete(key) {\n this.baseCache.delete(key);\n }\n get(key, options) {\n return this.baseCache.get(key, options);\n }\n set(key, value) {\n this.baseCache.set(key, {\n ...value,\n metadata: {\n ...value.metadata,\n cacheControl: { ...value.metadata.cacheControl, generatedTime: this.generatedTime }\n }\n });\n }\n setMetadata(key, cacheControlMetadata) {\n this.baseCache.setMetadata(key, {\n ...cacheControlMetadata,\n generatedTime: this.generatedTime\n });\n }\n length() {\n return this.baseCache.length();\n }\n keys() {\n return this.baseCache.keys();\n }\n entries() {\n return this.baseCache.entries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass DefaultCache {\n constructor() {\n this.data = {};\n }\n get(key, options) {\n if (options == null ? 
void 0 : options.copy) {\n return deepCopy(this.data[key]);\n }\n return this.data[key];\n }\n /**\n * Adds the specified key/value to the cache.\n *\n * @param key key at which to store value\n * @param entry value to be stored\n */\n set(key, entry) {\n if (entry.metadata.cacheControl.type === \"no-store\") {\n return;\n }\n this.data[key] = {\n ...entry,\n metadata: {\n ...entry.metadata,\n type: entry.metadata.type || {\n namespace: \"OneStore:Internal\",\n name: \"UnknownType\"\n },\n cacheControl: {\n generatedTime: Date.now() / 1e3,\n ...entry.metadata.cacheControl\n }\n }\n };\n }\n /**\n * Removes the cache entry associated with the specified key.\n *\n * @param key key to be removed from the store\n */\n delete(key) {\n delete this.data[key];\n }\n /**\n * Sets the metadata for the specified key if the key is in cache.\n * If the key doesn't exist, it does nothing.\n *\n * @param key key at which to store metadata\n * @param cacheControlMetadata metadata to be stored\n */\n setMetadata(key, cacheControlMetadata) {\n if (key in this.data) {\n this.data[key].metadata.cacheControl = {\n generatedTime: Date.now() / 1e3,\n ...cacheControlMetadata\n };\n }\n }\n length() {\n return this.keys().size;\n }\n keys() {\n return new Set(Object.keys(this.data));\n }\n entries() {\n return Object.entries(this.data);\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass ClearableCache extends DefaultCache {\n clear() {\n this.data = {};\n }\n}\nfunction buildServiceDescriptor() {\n return {\n type: \"cache\",\n version: \"1.0\",\n service: new DefaultCache()\n };\n}\nexport {\n ClearableCache,\n buildServiceDescriptor\n};\n//# sourceMappingURL=index.js.map\n","import {\n resolvedPromiseLike,\n type SyncOrAsync,\n type Result,\n setDifference,\n} from '@conduit-client/utils';\nimport { CacheInclusionPolicyService } from './cache-inclusion-policy';\nimport type { Cache, Key } from '@conduit-client/service-cache/v1';\nimport { buildServiceDescriptor as buildCacheServiceDescriptor } from '@conduit-client/service-cache/v1';\n\n/**\n * Implementation of CacheInclusionPolicy that uses an inclusive\n * L2 durable cache as a second level behind an in memory, synchronous,\n * L1 cache.\n */\nexport abstract class DurableCacheInclusionPolicy extends CacheInclusionPolicyService {\n /**\n * Reads data out of a 2 level inclusive store.\n */\n read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n alreadyRevivedKeys?: Set<Key>;\n }): SyncOrAsync<D> {\n const { l1, readFromL1, alreadyRevivedKeys } = options;\n // eavesdrop on which keys read() accesses\n const recordableCache = l1.record();\n\n // let read() try to get the result\n return readFromL1(recordableCache).then((readResult) => {\n if (readResult.isOk()) {\n return readResult;\n }\n\n const keysToRevive = recordableCache.keysRead;\n\n // if we have already tried to revive all the keys that were read and still can't build then\n // don't go through another revive cycle.\n if (alreadyRevivedKeys && setDifference(keysToRevive, alreadyRevivedKeys).size === 0) {\n return readResult;\n }\n\n // revive missing keys\n return this.revive(keysToRevive, l1).then((revivedKeys) => {\n // if we couldn't revive all the keys then just return the earlier result\n if (setDifference(keysToRevive, revivedKeys).size 
> 0) {\n return readResult;\n }\n\n // revive found everything we requested, retry the read\n return this.read({ l1, readFromL1, alreadyRevivedKeys: revivedKeys });\n });\n });\n }\n\n /**\n * Writes data to a 2 level inclusive store.\n */\n write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D> {\n const { l1, writeToL1 } = options;\n // eavesdrop on which keys write() accesses\n const tempL1 = buildCacheServiceDescriptor().service.record();\n\n // Write to a temp store to see what keys are ingested\n return writeToL1(tempL1).then(() => {\n // Try to read those keys from the existing store to see what keys are missing and need to be revived\n const keysToReviveRecordableCache = l1.record();\n\n tempL1.keysUpdated.forEach((key: Key) => keysToReviveRecordableCache.get(key));\n tempL1.missingKeysRead.forEach((key: Key) => keysToReviveRecordableCache.get(key));\n // Revive missing keys\n const missingKeys = keysToReviveRecordableCache.missingKeysRead;\n\n const revivePromiseLike =\n missingKeys.size > 0\n ? this.revive(missingKeys, l1).then(() => undefined)\n : resolvedPromiseLike(undefined);\n\n return revivePromiseLike.then(() => {\n // Do real write\n const recordableCache = l1.record();\n\n return writeToL1(recordableCache).then((result) => {\n this.syncToL2Cache(recordableCache.keysUpdated, l1);\n // don't let durable sync issues cause the write to fail\n return result;\n });\n });\n });\n }\n\n /**\n * Revive a set of keys into the L1 cache from another cache level. The\n * cache inclusion policy will use this when any data is missing from the L1 cache\n * in order to try to find data that is requested and might be in other cache levels.\n *\n * @param keys The set of keys to be revived\n * @param l1 The L1 Cache instance to revive the data into\n */\n abstract revive(keys: Set<Key>, l1: Cache): SyncOrAsync<Set<Key>>;\n\n /**\n * Synchronize the data from an L1 cache into the persistent levels of your cache\n * by reading the keys present in keys from the l1Cache, and populating them in the\n * durable cache.\n *\n * @param changedKeys The set of keys to synchronize to the durable cache\n * @param l1 The L1 cache to synchronize the data from.\n */\n abstract syncToL2Cache(changedKeys: Set<Key>, l1: Cache): SyncOrAsync<void>;\n}\n","import type {\n CacheControlMetadata,\n CacheEntry,\n Key,\n FilterPredicate,\n} from '@conduit-client/service-cache/v1';\n\ninterface LogicalOperators<T> {\n $and?: T[];\n $or?: T[];\n $not?: T;\n}\n\nexport type CacheKeyQueryOperands = { $regex: RegExp };\n\nexport type CacheMetadataQueryOperands =\n | LogicalOperators<CacheMetadataQueryOperands>\n | { cacheControlType: { $eq: 'max-age' | 'stale-while-revalidate' | 'no-cache' | 'no-store' } }\n | { maxAge: { $gte?: number; $lte?: number } };\n\nexport type CacheValueQueryOperands = never; // TODO: Implement value-based querying\n\nexport type CacheQuery =\n | LogicalOperators<CacheQuery>\n | { key: CacheKeyQueryOperands }\n | { metadata: CacheMetadataQueryOperands }\n | { value: CacheValueQueryOperands };\n\nexport const isAndQuery = (query: CacheQuery): query is { $and: CacheQuery[] } => '$and' in query;\n\nexport const isOrQuery = (query: CacheQuery): query is { $or: CacheQuery[] } => '$or' in query;\n\nexport const isNotQuery = (query: CacheQuery): query is { $not: CacheQuery } => '$not' in query;\n\nexport const matchesMetadata = (\n metadataQuery: CacheMetadataQueryOperands,\n cacheControl: CacheControlMetadata\n): boolean => {\n 
if (\n 'cacheControlType' in metadataQuery &&\n cacheControl.type !== metadataQuery.cacheControlType.$eq\n ) {\n return false;\n }\n if ('maxAge' in metadataQuery && cacheControl.type === 'max-age') {\n const maxAge = cacheControl.maxAge ?? 0;\n if (\n (metadataQuery.maxAge.$gte !== undefined && maxAge < metadataQuery.maxAge.$gte) ||\n (metadataQuery.maxAge.$lte !== undefined && maxAge > metadataQuery.maxAge.$lte)\n ) {\n return false;\n }\n }\n return true;\n};\nexport function queryToPredicate(query: CacheQuery): FilterPredicate {\n return (key: Key, entry: CacheEntry<unknown>): boolean => {\n if (!query) return false;\n\n if (isAndQuery(query))\n return query.$and.every((subQuery) => queryToPredicate(subQuery)(key, entry));\n if (isOrQuery(query))\n return query.$or.some((subQuery) => queryToPredicate(subQuery)(key, entry));\n if (isNotQuery(query)) return !queryToPredicate(query.$not)(key, entry);\n\n if ('key' in query) return matchesKey(query.key, key);\n if ('metadata' in query)\n return matchesMetadata(query.metadata, entry.metadata.cacheControl);\n\n if ('value' in query) return false; // TODO: Not implemented\n\n throw new Error('Unknown Query Operation');\n };\n}\nfunction matchesKey(keyQuery: CacheKeyQueryOperands, key: Key): boolean {\n if ('$regex' in keyQuery) {\n return keyQuery.$regex.test(key);\n }\n return false;\n}\n","import type { CacheControlMetadata, CacheEntry } from '@conduit-client/service-cache/v1';\n\nexport type CacheUpdate = {\n type: InvalidateOperation;\n};\nexport type InvalidateOperation = 'invalidate';\n\n/**\n * Processes a cache update operation and determines the appropriate modification.\n *\n * This function analyzes the provided `update` and the `existing` cache entry\n * to determine the necessary update type. It returns one of three possible outcomes:\n *\n * - `{ type: 'entry', entry }`: A full cache entry update, including both value and metadata.\n * - `{ type: 'metadata', metadata }`: A metadata-only update, leaving the value unchanged.\n * - `{ type: 'no-op' }`: No changes are needed, and the cache should remain as is.\n *\n * @param update - The cache update operation to apply.\n * @param existing - The existing cache entry being modified.\n * @returns An object indicating the type of update:\n * - A full cache entry update (`type: 'entry'`)\n * - A metadata-only update (`type: 'metadata'`)\n * - A no-op (`type: 'no-op'`) if no changes are required.\n */\nexport function buildUpdate(\n update: CacheUpdate,\n existing: CacheEntry<unknown>\n):\n | { type: 'entry'; entry: CacheEntry<unknown> }\n | { type: 'metadata'; metadata: CacheControlMetadata }\n | { type: 'no-op' } {\n switch (update.type) {\n case 'invalidate':\n const updatedCacheControl = buildInvalidatedCacheControl(\n existing.metadata.cacheControl\n );\n return updatedCacheControl !== undefined\n ? 
{ type: 'metadata', metadata: updatedCacheControl }\n : { type: 'no-op' };\n\n default:\n throw new Error(`Invalid update operation: ${update.type}`);\n }\n}\n\n/**\n * Builds an updated CacheControlMetadata object that invalidates the cache entry.\n *\n * @param existingCacheControl - The current CacheControlMetadata.\n * @returns A new CacheControlMetadata object with `maxAge` set to `0`, or undefined if no changes are needed.\n */\nfunction buildInvalidatedCacheControl(\n existingCacheControl: CacheControlMetadata\n): CacheControlMetadata | undefined {\n switch (existingCacheControl.type) {\n case 'max-age':\n case 'stale-while-revalidate':\n if (existingCacheControl.maxAge !== 0) {\n return {\n ...existingCacheControl,\n maxAge: 0,\n };\n }\n case 'no-cache':\n case 'no-store':\n }\n\n return undefined; // No-op: no changes\n}\n","import { DeepReadonly, ok, resolvedPromiseLike, Result, SyncOrAsync } from '@conduit-client/utils';\nimport { CacheInclusionPolicyService } from './cache-inclusion-policy';\nimport type { CacheEntry, Key, NamedCacheService, Cache } from '@conduit-client/service-cache/v1';\nimport type { CacheInclusionPolicyServiceDescriptor } from './cache-inclusion-policy';\nimport { CacheQuery, queryToPredicate } from './cache-query';\nimport { buildUpdate, CacheUpdate } from './cache-update';\n\n/**\n * Implementation of CacheInclusionPolicy that uses a single level, in memory,\n * synchronous L1 cache.\n */\nexport class InMemoryCacheInclusionPolicy extends CacheInclusionPolicyService {\n constructor(protected services: NamedCacheService) {\n super();\n }\n\n /**\n * Reads data out of a single level in memory store.\n */\n read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D> {\n const { l1, readFromL1 } = options;\n // l1 is all we've got\n return readFromL1(l1);\n }\n\n /**\n * Writes data to a single level in memory store.\n */\n write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }) {\n const { l1, writeToL1 } = options;\n return writeToL1(l1);\n }\n\n /**\n * Finds cache entries that match the given query.\n * Returns an async generator that yields `[key, entry]`.\n */\n async *find(\n query: CacheQuery\n ): AsyncGenerator<[key: Key, value: DeepReadonly<CacheEntry<unknown>>], void, unknown> {\n const cache = this.services.cache;\n const predicate = queryToPredicate(query);\n\n const filteredEntries = cache.filter(predicate).entries();\n for (const entry of filteredEntries) {\n yield entry;\n }\n }\n\n /**\n * Finds and modifies cache entries that match the given query.\n * Extends `find(query)` and returns an async generator of modified keys.\n */\n async *findAndModify(\n query: CacheQuery,\n cacheUpdate: CacheUpdate\n ): AsyncGenerator<Key, void, unknown> {\n const cache = this.services.cache;\n for await (const [key, value] of this.find(query)) {\n const update = buildUpdate(cacheUpdate, value);\n switch (update.type) {\n case 'entry':\n this.write({\n l1: cache,\n writeToL1: (l1) => resolvedPromiseLike(ok(l1.set(key, update.entry))),\n });\n yield key;\n break;\n case 'metadata':\n this.write({\n l1: cache,\n writeToL1: (l1) =>\n resolvedPromiseLike(ok(l1.setMetadata(key, update.metadata))),\n });\n yield key;\n break;\n case 'no-op':\n }\n }\n }\n}\n\n/**\n * Constructs an in-memory-only CacheInclusionPolicy.\n *\n * @returns in-memory-only CacheInclusionPolicy\n */\nexport function buildInMemoryCacheInclusionPolicyService(\n cache: 
Cache\n): CacheInclusionPolicyServiceDescriptor {\n return {\n service: new InMemoryCacheInclusionPolicy({ cache }),\n type: 'cacheInclusionPolicy',\n version: '1.0',\n };\n}\n"],"names":["buildCacheServiceDescriptor"],"mappings":";;;;;AAYO,MAAe,4BAA4B;AA2ClD;ACvDA;AAAA;AAAA;AAAA;AAAA;AAwHA,MAAM,GAAG;AAAA,EACP,YAAY,OAAO;AACjB,SAAK,QAAQ;AAAA,EACf;AAAA,EACA,OAAO;AACL,WAAO;AAAA,EACT;AAAA,EACA,QAAQ;AACN,WAAO,CAAC,KAAK,KAAI;AAAA,EACnB;AACF;AAYA,MAAM,KAAK,CAAC,UAAU,IAAI,GAAG,KAAK;AAyClC,SAAS,cAAc,MAAM,MAAM;AACjC,QAAM,gBAAgC,oBAAI,IAAG;AAC7C,aAAW,WAAW,MAAM;AAC1B,QAAI,CAAC,KAAK,IAAI,OAAO,GAAG;AACtB,oBAAc,IAAI,OAAO;AAAA,IAC3B;AAAA,EACF;AACA,SAAO;AACT;AAuBA,SAAS,oBAAoB,QAAQ;AACnC,MAAI,cAAc,MAAM,GAAG;AACzB,WAAO,OAAO,KAAK,CAAC,eAAe,UAAU;AAAA,EAC/C;AACA,SAAO;AAAA,IACL,MAAM,CAAC,aAAa,gBAAgB;AAClC,UAAI;AACF,eAAO,oBAAoB,YAAY,MAAM,CAAC;AAAA,MAChD,SAAS,GAAG;AACV,YAAI,gBAAgB,QAAQ;AAC1B,iBAAO,oBAAoB,MAAM;AAAA,QACnC;AACA,eAAO,oBAAoB,CAAC;AAAA,MAC9B;AAAA,IACF;AAAA,EACJ;AACA;AACA,SAAS,oBAAoB,QAAQ;AACnC,MAAI,cAAc,MAAM,GAAG;AACzB,WAAO,OAAO,KAAK,CAAC,eAAe,UAAU;AAAA,EAC/C;AACA,SAAO;AAAA,IACL,MAAM,CAAC,cAAc,eAAe;AAClC,UAAI,OAAO,eAAe,YAAY;AACpC,YAAI;AACF,iBAAO,oBAAoB,WAAW,MAAM,CAAC;AAAA,QAC/C,SAAS,GAAG;AACV,iBAAO,oBAAoB,CAAC;AAAA,QAC9B;AAAA,MACF;AACA,aAAO,oBAAoB,MAAM;AAAA,IACnC;AAAA,EACJ;AACA;AACA,SAAS,cAAc,GAAG;AACxB,SAAO,QAAQ,KAAK,OAAO,SAAS,EAAE,UAAU;AAClD;AC1PA;AAAA;AAAA;AAAA;AAAA;AAKA;AAAA;AAAA;AAAA;AAAA;AAKA,MAAM,EAAE,WAAW,MAAK,IAAK;AAC7B,SAAS,SAAS,GAAG;AACnB,QAAM,cAAc,UAAU,CAAC;AAC/B,SAAO,cAAc,MAAM,WAAW,IAAI;AAC5C;AACA,MAAM,uBAAuB;AAAA,EAC3B,YAAY,WAAW;AACrB,SAAK,YAAY;AACjB,SAAK,WAA2B,oBAAI,IAAG;AACvC,SAAK,kBAAkC,oBAAI,IAAG;AAC9C,SAAK,cAA8B,oBAAI,IAAG;AAC1C,SAAK,sBAAsC,oBAAI,IAAG;AAAA,EACpD;AAAA,EACA,OAAO,KAAK;AACV,SAAK,YAAY,IAAI,GAAG;AACxB,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,SAAK,SAAS,IAAI,GAAG;AACrB,UAAM,QAAQ,KAAK,UAAU,IAAI,GAAG;AACpC,QAAI,UAAU,QAAQ;AACpB,WAAK,gBAAgB,IAAI,GAAG;AAAA,IAC9B;AACA,QAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,aAAO,SAAS,KAAK;AAAA,IACvB;AACA,WAAO;AAAA,EACT;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,YAAY,IAAI,GAAG;AACxB,SAAK,oBAAoB,IAAI,GAAG;AAChC,SAAK,UAAU,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,oBAAoB,IAAI,GAAG;AAChC,SAAK,UAAU,YAAY,KAAK,oBAAoB;AAAA,EACtD;AAAA,EACA,SAAS;AACP,WAAO,KAAK,UAAU,OAAM;AAAA,EAC9B;AAAA,EACA,OAAO;AACL,WAAO,KAAK,UAAU,KAAI;AAAA,EAC5B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,UAAU,QAAO;AAAA,EAC/B;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,qBAAqB;AAAA,EACzB,YAAY,WAAW,WAAW;AAChC,SAAK,YAAY;AACjB,SAAK,YAAY;AAAA,EACnB;AAAA,EACA,OAAO,KAAK;AACV,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,UAAM,SAAS,KAAK,UAAU,IAAI,GAAG;AACrC,QAAI,UAAU,KAAK,UAAU,KAAK,MAAM,GAAG;AACzC,UAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,eAAO,SAAS,MAAM;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,UAAU,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,UAAU,YAAY,KAAK,oBAAoB;AAAA,EACtD;AAAA,EACA,SAAS;AACP,WAAO,KAAK,gBAAe,EAAG;AAAA,EAChC;AAAA,EACA,OAAO;AACL,WAAO,KAAK,gBAAe;AAAA,EAC7B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,mBAAkB;AAAA,EAChC;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,qBAAqB;AACnB,WAAO,KAAK,UAAU,QAAO,EAAG,OAAO,CAAC,CAAC,KAAK,MAAM,MAAM;AACxD,aAAO,KAAK,IAAI,GAAG;AAAA,IACrB,CAAC;AAAA,EACH;AAAA,EACA,kBAAkB;AAChB,UAAM,iBAAiC,oBAAI,IAAG;AAC9C,SAAK,UAAU,KAAI,EAAG,QAAQ,CAAC,QAAQ;AACrC,UAAI,KA
AK,IAAI,GAAG,GAAG;AACjB,uBAAe,IAAI,GAAG;AAAA,MACxB;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,uBAAuB;AAAA,EAC3B,YAAY,WAAW,eAAe;AACpC,SAAK,YAAY;AACjB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EACA,OAAO,KAAK;AACV,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,WAAO,KAAK,UAAU,IAAI,KAAK,OAAO;AAAA,EACxC;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,UAAU,IAAI,KAAK;AAAA,MACtB,GAAG;AAAA,MACH,UAAU;AAAA,QACR,GAAG,MAAM;AAAA,QACT,cAAc,EAAE,GAAG,MAAM,SAAS,cAAc,eAAe,KAAK,cAAa;AAAA,MACzF;AAAA,IACA,CAAK;AAAA,EACH;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,UAAU,YAAY,KAAK;AAAA,MAC9B,GAAG;AAAA,MACH,eAAe,KAAK;AAAA,IAC1B,CAAK;AAAA,EACH;AAAA,EACA,SAAS;AACP,WAAO,KAAK,UAAU,OAAM;AAAA,EAC9B;AAAA,EACA,OAAO;AACL,WAAO,KAAK,UAAU,KAAI;AAAA,EAC5B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,UAAU,QAAO;AAAA,EAC/B;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,aAAa;AAAA,EACjB,cAAc;AACZ,SAAK,OAAO,CAAA;AAAA,EACd;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,QAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,aAAO,SAAS,KAAK,KAAK,GAAG,CAAC;AAAA,IAChC;AACA,WAAO,KAAK,KAAK,GAAG;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,KAAK,OAAO;AACd,QAAI,MAAM,SAAS,aAAa,SAAS,YAAY;AACnD;AAAA,IACF;AACA,SAAK,KAAK,GAAG,IAAI;AAAA,MACf,GAAG;AAAA,MACH,UAAU;AAAA,QACR,GAAG,MAAM;AAAA,QACT,MAAM,MAAM,SAAS,QAAQ;AAAA,UAC3B,WAAW;AAAA,UACX,MAAM;AAAA,QAChB;AAAA,QACQ,cAAc;AAAA,UACZ,eAAe,KAAK,IAAG,IAAK;AAAA,UAC5B,GAAG,MAAM,SAAS;AAAA,QAC5B;AAAA,MACA;AAAA,IACA;AAAA,EACE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,KAAK;AACV,WAAO,KAAK,KAAK,GAAG;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,KAAK,sBAAsB;AACrC,QAAI,OAAO,KAAK,MAAM;AACpB,WAAK,KAAK,GAAG,EAAE,SAAS,eAAe;AAAA,QACrC,eAAe,KAAK,IAAG,IAAK;AAAA,QAC5B,GAAG;AAAA,MACX;AAAA,IACI;AAAA,EACF;AAAA,EACA,SAAS;AACP,WAAO,KAAK,KAAI,EAAG;AAAA,EACrB;AAAA,EACA,OAAO;AACL,WAAO,IAAI,IAAI,OAAO,KAAK,KAAK,IAAI,CAAC;AAAA,EACvC;AAAA,EACA,UAAU;AACR,WAAO,OAAO,QAAQ,KAAK,IAAI;AAAA,EACjC;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AAMA,SAAS,yBAAyB;AAChC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS,IAAI,aAAY;AAAA,EAC7B;AACA;ACjPO,MAAe,oCAAoC,4BAA4B;AAAA;AAAA;AAAA;AAAA,EAIlF,KAAsC,SAInB;AACf,UAAM,EAAE,IAAI,YAAY,mBAAA,IAAuB;AAE/C,UAAM,kBAAkB,GAAG,OAAA;AAG3B,WAAO,WAAW,eAAe,EAAE,KAAK,CAAC,eAAe;AACpD,UAAI,WAAW,QAAQ;AACnB,eAAO;AAAA,MACX;AAEA,YAAM,eAAe,gBAAgB;AAIrC,UAAI,sBAAsB,cAAc,cAAc,kBAAkB,EAAE,SAAS,GAAG;AAClF,eAAO;AAAA,MACX;AAGA,aAAO,KAAK,OAAO,cAAc,EAAE,EAAE,KAAK,CAAC,gBAAgB;AAEvD,YAAI,cAAc,cAAc,WAAW,EAAE,OAAO,GAAG;AACnD,iBAAO;AAAA,QACX;AAGA,eAAO,KAAK,KAAK,EAAE,IAAI,YAAY,oBAAoB,aAAa;AAAA,MACxE,CAAC;AAAA,IACL,CAAC;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAuC,SAGpB;AACf,UAAM,EAAE,IAAI,UAAA,IAAc;AAE1B,UAAM,SAASA,yBAA8B,QAAQ,OAAA;AAGrD,WAAO,UAAU,MAAM,EAAE,KAAK,MAAM;AAEhC,YAAM,8BAA8B,GAAG,OAAA;AAEvC,aAAO,YAAY,QAAQ,CAAC,QAAa,4BAA4B,IAAI,GAAG,CAAC;AAC7E,aAAO,gBAAgB,QAAQ,CAAC,QAAa,4BAA4B,IAAI,GAAG,CAAC;AAEjF,YAAM,cAAc,4BAA4B;AAEhD,YAAM,oBACF,YAAY,OAAO,IACb,KAAK,OAAO,aAAa,EAAE,EAAE,KAAK,MAAM,MAAS,IACjD,oBAAoB,MAAS;AAEvC,aAAO,kBAAkB,KAAK,MAAM;AAEhC,cAAM,kBAAkB,GAAG,OAAA;AAE3B,eAAO,UAAU,eAAe,EAAE,KAAK,CAAC,WAAW;AAC/C,eAAK,cAAc,gBAAgB,aAAa,EAAE;AAElD,iBAAO;AAAA,QACX,CAAC;AAAA,MACL,CAAC;AAAA,IACL,CAAC;AAAA,EACL;AAqBJ;ACrFO,MAAM,aAAa,CAAC,UAAuD,UAAU;AAErF,MAAM,YAAY,CAAC,UAAsD,SAAS;AAElF,MAAM,aAAa,CAAC,UAAqD,UAAU;AAEnF,MAAM,kBAAkB,CAC3B,e
ACA,iBACU;AACV,MACI,sBAAsB,iBACtB,aAAa,SAAS,cAAc,iBAAiB,KACvD;AACE,WAAO;AAAA,EACX;AACA,MAAI,YAAY,iBAAiB,aAAa,SAAS,WAAW;AAC9D,UAAM,SAAS,aAAa,UAAU;AACtC,QACK,cAAc,OAAO,SAAS,UAAa,SAAS,cAAc,OAAO,QACzE,cAAc,OAAO,SAAS,UAAa,SAAS,cAAc,OAAO,MAC5E;AACE,aAAO;AAAA,IACX;AAAA,EACJ;AACA,SAAO;AACX;AACO,SAAS,iBAAiB,OAAoC;AACjE,SAAO,CAAC,KAAU,UAAwC;AACtD,QAAI,CAAC,MAAO,QAAO;AAEnB,QAAI,WAAW,KAAK;AAChB,aAAO,MAAM,KAAK,MAAM,CAAC,aAAa,iBAAiB,QAAQ,EAAE,KAAK,KAAK,CAAC;AAChF,QAAI,UAAU,KAAK;AACf,aAAO,MAAM,IAAI,KAAK,CAAC,aAAa,iBAAiB,QAAQ,EAAE,KAAK,KAAK,CAAC;AAC9E,QAAI,WAAW,KAAK,EAAG,QAAO,CAAC,iBAAiB,MAAM,IAAI,EAAE,KAAK,KAAK;AAEtE,QAAI,SAAS,MAAO,QAAO,WAAW,MAAM,KAAK,GAAG;AACpD,QAAI,cAAc;AACd,aAAO,gBAAgB,MAAM,UAAU,MAAM,SAAS,YAAY;AAEtE,QAAI,WAAW,MAAO,QAAO;AAE7B,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC7C;AACJ;AACA,SAAS,WAAW,UAAiC,KAAmB;AACpE,MAAI,YAAY,UAAU;AACtB,WAAO,SAAS,OAAO,KAAK,GAAG;AAAA,EACnC;AACA,SAAO;AACX;ACvDO,SAAS,YACZ,QACA,UAIoB;AACpB,UAAQ,OAAO,MAAA;AAAA,IACX,KAAK;AACD,YAAM,sBAAsB;AAAA,QACxB,SAAS,SAAS;AAAA,MAAA;AAEtB,aAAO,wBAAwB,SACzB,EAAE,MAAM,YAAY,UAAU,oBAAA,IAC9B,EAAE,MAAM,QAAA;AAAA,IAElB;AACI,YAAM,IAAI,MAAM,6BAA6B,OAAO,IAAI,EAAE;AAAA,EAAA;AAEtE;AAQA,SAAS,6BACL,sBACgC;AAChC,UAAQ,qBAAqB,MAAA;AAAA,IACzB,KAAK;AAAA,IACL,KAAK;AACD,UAAI,qBAAqB,WAAW,GAAG;AACnC,eAAO;AAAA,UACH,GAAG;AAAA,UACH,QAAQ;AAAA,QAAA;AAAA,MAEhB;AAAA,EAEC;AAGT,SAAO;AACX;ACzDO,MAAM,qCAAqC,4BAA4B;AAAA,EAC1E,YAAsB,UAA6B;AAC/C,UAAA;AADkB,SAAA,WAAA;AAAA,EAEtB;AAAA;AAAA;AAAA;AAAA,EAKA,KAAsC,SAGnB;AACf,UAAM,EAAE,IAAI,WAAA,IAAe;AAE3B,WAAO,WAAW,EAAE;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAuC,SAGpC;AACC,UAAM,EAAE,IAAI,UAAA,IAAc;AAC1B,WAAO,UAAU,EAAE;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,KACH,OACmF;AACnF,UAAM,QAAQ,KAAK,SAAS;AAC5B,UAAM,YAAY,iBAAiB,KAAK;AAExC,UAAM,kBAAkB,MAAM,OAAO,SAAS,EAAE,QAAA;AAChD,eAAW,SAAS,iBAAiB;AACjC,YAAM;AAAA,IACV;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cACH,OACA,aACkC;AAClC,UAAM,QAAQ,KAAK,SAAS;AAC5B,qBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,GAAG;AAC/C,YAAM,SAAS,YAAY,aAAa,KAAK;AAC7C,cAAQ,OAAO,MAAA;AAAA,QACX,KAAK;AACD,eAAK,MAAM;AAAA,YACP,IAAI;AAAA,YACJ,WAAW,CAAC,OAAO,oBAAoB,GAAG,GAAG,IAAI,KAAK,OAAO,KAAK,CAAC,CAAC;AAAA,UAAA,CACvE;AACD,gBAAM;AACN;AAAA,QACJ,KAAK;AACD,eAAK,MAAM;AAAA,YACP,IAAI;AAAA,YACJ,WAAW,CAAC,OACR,oBAAoB,GAAG,GAAG,YAAY,KAAK,OAAO,QAAQ,CAAC,CAAC;AAAA,UAAA,CACnE;AACD,gBAAM;AACN;AAAA,MACC;AAAA,IAEb;AAAA,EACJ;AACJ;AAOO,SAAS,yCACZ,OACqC;AACrC,SAAO;AAAA,IACH,SAAS,IAAI,6BAA6B,EAAE,OAAO;AAAA,IACnD,MAAM;AAAA,IACN,SAAS;AAAA,EAAA;AAEjB;"}
+ {"version":3,"file":"index.js","sources":["../../src/v1/cache-inclusion-policy.ts","../../../../utils/dist/index.js","../../../cache/dist/v1/index.js","../../src/v1/durable-cache-inclusion-policy.ts","../../src/v1/cache-query.ts","../../src/v1/cache-update.ts","../../src/v1/in-memory-cache-inclusion-policy.ts"],"sourcesContent":["import { type SyncOrAsync, type Result } from '@conduit-client/utils';\nimport type { DeepReadonly, NamedService, ServiceDescriptor } from '@conduit-client/utils';\nimport type { CacheEntry, Key, Cache } from '@conduit-client/service-cache/v1';\nimport { CacheQuery } from './cache-query';\nimport { CacheUpdate } from './cache-update';\n\n/**\n * CacheInclusionPolicy is an interface for accessing the cache\n * and synchronizing the cache data with another external cache.\n *\n * https://en.wikipedia.org/wiki/Cache_inclusion_policy\n */\nexport abstract class CacheInclusionPolicyService {\n /**\n * Reads data out of a cache applying a multilevel cache inclusion policy to the results.\n *\n * @param {Cache} l1 The cache to use for L1 cache reads.\n * @param {(cache: Cache) => SyncOrAsync<D>} readFromL1 A function for reading the data from L1.\n * The readFromL1 function returns a boolean indicating whether the completed read operation\n * was successful.\n * @returns {SyncOrAsync<D>}\n */\n abstract read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D>;\n\n /**\n * Write data out to a cache applying a multilevel cache inclusion policy to the writes.\n *\n * @param {Cache} l1 The cache to use for L1 cache writes.\n * @param {(l1: Cache) => SyncOrAsync<void>} writeToL1 A function for writing the data to L1.\n * @returns {SyncOrAsync<void>}\n */\n abstract write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D>;\n\n /**\n * Finds cache entries that match the given query.\n * Returns an async generator that yields `[key, entry]`.\n */\n abstract find(\n query: CacheQuery\n ): AsyncGenerator<[key: Key, value: DeepReadonly<CacheEntry<unknown>>], void, unknown>;\n\n /**\n * Finds and modifies cache entries that match the given query.\n * Extends `find(query)` and returns an async generator of modified keys.\n */\n abstract findAndModify(\n query: CacheQuery,\n cacheUpdate: CacheUpdate\n ): AsyncGenerator<Key, void, unknown>;\n}\n\nexport type NamedCacheInclusionPolicyService<Name extends string = 'cacheInclusionPolicy'> =\n NamedService<Name, CacheInclusionPolicyService>;\n\nexport type CacheInclusionPolicyServiceDescriptor = ServiceDescriptor<\n CacheInclusionPolicyService,\n 'cacheInclusionPolicy',\n '1.0'\n>;\n","/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\nfunction bfs(start, predicate, getChildren) {\n const queue = [...start];\n const visited = /* @__PURE__ */ new Set([...start]);\n const matches2 = /* @__PURE__ */ new Set();\n while (queue.length) {\n const curr = queue.shift();\n if (predicate(curr)) {\n matches2.add(curr);\n }\n const children = getChildren(curr);\n for (const child of children) {\n if (!visited.has(child)) {\n visited.add(child);\n queue.push(child);\n }\n }\n }\n return matches2;\n}\nfunction lineFormatter(position, message, filePath) {\n return `${message} (${filePath}:${position.line}:${position.column})`;\n}\nclass DefaultFileParserLogger {\n constructor(services, filePath) {\n this.services = services;\n 
this.filePath = filePath;\n }\n trace(position, message) {\n this.services.logger.trace(this.format(position, message));\n }\n debug(position, message) {\n this.services.logger.debug(this.format(position, message));\n }\n info(position, message) {\n this.services.logger.info(this.format(position, message));\n }\n warn(position, message) {\n this.services.logger.warn(this.format(position, message));\n }\n error(position, message) {\n this.services.logger.error(this.format(position, message));\n }\n format(position, message) {\n return lineFormatter(position, message, this.filePath);\n }\n}\nfunction matches(test, s) {\n if (test === void 0) {\n return false;\n } else if (typeof test === \"string\") {\n return s === test;\n } else if (test instanceof RegExp) {\n return test.test(s);\n } else if (typeof test === \"function\") {\n return test(s);\n }\n return test.some((m) => matches(m, s));\n}\nfunction includes(incexc, s) {\n if (matches(incexc.exclude, s)) {\n return false;\n }\n if (matches(incexc.include, s)) {\n return true;\n }\n if (incexc.include) {\n return false;\n }\n return true;\n}\nconst { create, freeze, keys, entries } = Object;\nconst { hasOwnProperty } = Object.prototype;\nconst { isArray } = Array;\nconst { push, indexOf, slice } = Array.prototype;\nconst { stringify, parse } = JSON;\nconst WeakSetConstructor = WeakSet;\nconst LogLevelMap = {\n TRACE: 4,\n DEBUG: 3,\n INFO: 2,\n WARN: 1,\n ERROR: 0\n};\nclass ConsoleLogger {\n constructor(level = \"WARN\", printer = console.log, formatter = (level2, message) => `${level2}: ${message}`) {\n this.level = level;\n this.printer = printer;\n this.formatter = formatter;\n this.messages = [];\n }\n trace(message) {\n this.log(\"TRACE\", message);\n }\n debug(message) {\n this.log(\"DEBUG\", message);\n }\n info(message) {\n this.log(\"INFO\", message);\n }\n warn(message) {\n this.log(\"WARN\", message);\n }\n error(message) {\n this.log(\"ERROR\", message);\n }\n log(level, message) {\n if (LogLevelMap[level] > LogLevelMap[this.level]) {\n return;\n }\n this.printer(this.formatter(level, message));\n }\n}\nfunction loggerService(level, printer, formatter) {\n return new ConsoleLogger(level, printer, formatter);\n}\nclass Ok {\n constructor(value) {\n this.value = value;\n }\n isOk() {\n return true;\n }\n isErr() {\n return !this.isOk();\n }\n}\nclass Err {\n constructor(error) {\n this.error = error;\n }\n isOk() {\n return false;\n }\n isErr() {\n return !this.isOk();\n }\n}\nconst ok = (value) => new Ok(value);\nconst err = (err2) => new Err(err2);\nclass DataNotFoundError extends Error {\n constructor(message) {\n super(message);\n this.name = \"DataNotFoundError\";\n }\n}\nclass DataIncompleteError extends Error {\n constructor(message, partialData) {\n super(message);\n this.partialData = partialData;\n this.name = \"DataIncompleteError\";\n }\n}\nfunction isDataNotFoundError(error) {\n return error instanceof DataNotFoundError || error.name === \"DataNotFoundError\";\n}\nfunction isDataIncompleteError(error) {\n return error instanceof DataIncompleteError || error.name === \"DataIncompleteError\";\n}\nfunction isCacheHitOrError(value) {\n if (value.isErr() && (isDataIncompleteError(value.error) || isDataNotFoundError(value.error))) {\n return false;\n }\n return true;\n}\nfunction isCacheMiss(value) {\n return !isCacheHitOrError(value);\n}\nfunction isResult(value) {\n return value != null && typeof value === \"object\" && \"isOk\" in value && \"isErr\" in value && typeof value.isOk === \"function\" && typeof value.isErr 
=== \"function\" && (value.isOk() === true && value.isErr() === false && \"value\" in value || value.isOk() === false && value.isErr() === true && \"error\" in value);\n}\nfunction setOverlaps(setA, setB) {\n for (const element of setA) {\n if (setB.has(element)) {\n return true;\n }\n }\n return false;\n}\nfunction setDifference(setA, setB) {\n const differenceSet = /* @__PURE__ */ new Set();\n for (const element of setA) {\n if (!setB.has(element)) {\n differenceSet.add(element);\n }\n }\n return differenceSet;\n}\nfunction addAllToSet(targetSet, sourceSet) {\n for (const element of sourceSet) {\n targetSet.add(element);\n }\n}\nconst toTypeScriptSafeIdentifier = (s) => s.length >= 1 ? s[0].replace(/[^$_\\p{ID_Start}]/u, \"_\") + s.slice(1).replace(/[^$\\u200c\\u200d\\p{ID_Continue}]/gu, \"_\") : \"\";\nfunction isSubscribable(obj) {\n return typeof obj === \"object\" && obj !== null && \"subscribe\" in obj && typeof obj.subscribe === \"function\" && \"refresh\" in obj && typeof obj.refresh === \"function\";\n}\nfunction isSubscribableResult(x) {\n if (!isResult(x)) {\n return false;\n }\n return isSubscribable(x.isOk() ? x.value : x.error);\n}\nfunction buildSubscribableResult(result, subscribe, refresh) {\n if (result.isOk()) {\n return ok({ data: result.value, subscribe, refresh });\n } else {\n return err({ failure: result.error, subscribe, refresh });\n }\n}\nfunction resolvedPromiseLike(result) {\n if (isPromiseLike(result)) {\n return result.then((nextResult) => nextResult);\n }\n return {\n then: (onFulfilled, _onRejected) => {\n try {\n return resolvedPromiseLike(onFulfilled(result));\n } catch (e) {\n if (onFulfilled === void 0) {\n return resolvedPromiseLike(result);\n }\n return rejectedPromiseLike(e);\n }\n }\n };\n}\nfunction rejectedPromiseLike(reason) {\n if (isPromiseLike(reason)) {\n return reason.then((nextResult) => nextResult);\n }\n return {\n then: (_onFulfilled, onRejected) => {\n if (typeof onRejected === \"function\") {\n try {\n return resolvedPromiseLike(onRejected(reason));\n } catch (e) {\n return rejectedPromiseLike(e);\n }\n }\n return rejectedPromiseLike(reason);\n }\n };\n}\nfunction isPromiseLike(x) {\n return typeof (x == null ? 
void 0 : x.then) === \"function\";\n}\nfunction racesync(values) {\n for (const value of values) {\n let settled = void 0;\n if (isPromiseLike(value)) {\n value.then(\n (_) => {\n settled = value;\n },\n (_) => {\n settled = value;\n }\n );\n } else {\n settled = resolvedPromiseLike(value);\n }\n if (settled !== void 0) {\n return settled;\n }\n }\n return Promise.race(values);\n}\nfunction withResolvers() {\n let resolve, reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n}\nfunction deepEquals(x, y) {\n if (x === void 0) {\n return y === void 0;\n } else if (x === null) {\n return y === null;\n } else if (y === null) {\n return x === null;\n } else if (isArray(x)) {\n if (!isArray(y) || x.length !== y.length) {\n return false;\n }\n for (let i = 0; i < x.length; ++i) {\n if (!deepEquals(x[i], y[i])) {\n return false;\n }\n }\n return true;\n } else if (typeof x === \"object\") {\n if (typeof y !== \"object\") {\n return false;\n }\n const xkeys = Object.keys(x);\n const ykeys = Object.keys(y);\n if (xkeys.length !== ykeys.length) {\n return false;\n }\n for (let i = 0; i < xkeys.length; ++i) {\n const key = xkeys[i];\n if (!deepEquals(x[key], y[key])) {\n return false;\n }\n }\n return true;\n }\n return x === y;\n}\nfunction stableJSONStringify(node) {\n if (node && node.toJSON && typeof node.toJSON === \"function\") {\n node = node.toJSON();\n }\n if (node === void 0) {\n return;\n }\n if (typeof node === \"number\") {\n return isFinite(node) ? \"\" + node : \"null\";\n }\n if (typeof node !== \"object\") {\n return stringify(node);\n }\n let i;\n let out;\n if (isArray(node)) {\n out = \"[\";\n for (i = 0; i < node.length; i++) {\n if (i) {\n out += \",\";\n }\n out += stableJSONStringify(node[i]) || \"null\";\n }\n return out + \"]\";\n }\n if (node === null) {\n return \"null\";\n }\n const objKeys = keys(node).sort();\n out = \"\";\n for (i = 0; i < objKeys.length; i++) {\n const key = objKeys[i];\n const value = stableJSONStringify(node[key]);\n if (!value) {\n continue;\n }\n if (out) {\n out += \",\";\n }\n out += stringify(key) + \":\" + value;\n }\n return \"{\" + out + \"}\";\n}\nfunction toError(x) {\n if (x instanceof Error) {\n return x;\n }\n return new Error(typeof x === \"string\" ? x : JSON.stringify(x));\n}\nfunction deepCopy(x) {\n const stringified = stringify(x);\n return stringified ? 
parse(stringified) : void 0;\n}\nfunction readableStreamToAsyncIterable(stream) {\n if (stream.locked) {\n return err(new Error(\"ReadableStream is already locked\"));\n }\n if (Symbol.asyncIterator in stream) {\n return ok(stream);\n }\n const reader = stream.getReader();\n return ok({\n [Symbol.asyncIterator]: () => ({\n next: async () => {\n try {\n const result = await reader.read();\n if (result.done) {\n try {\n reader.releaseLock();\n } catch {\n }\n return { done: true, value: void 0 };\n }\n return {\n done: false,\n value: result.value\n };\n } catch (e) {\n try {\n reader.releaseLock();\n } catch {\n }\n throw e;\n }\n },\n return: async (value) => {\n try {\n await reader.cancel();\n } catch {\n }\n try {\n reader.releaseLock();\n } catch {\n }\n return { done: true, value };\n },\n throw: async (exception) => {\n try {\n await reader.cancel();\n } catch {\n }\n try {\n reader.releaseLock();\n } catch {\n }\n throw exception;\n }\n })\n });\n}\nfunction satisfies(provided, requested) {\n const providedN = provided.split(\".\").map((s) => parseInt(s));\n const requestedN = requested.split(\".\").map((s) => parseInt(s));\n return providedN[0] === requestedN[0] && providedN[1] >= requestedN[1];\n}\nfunction stringIsVersion(s) {\n const versionParts = s.split(\".\");\n return (versionParts.length === 2 || versionParts.length === 3) && versionParts.every((part) => part.match(/^\\d+$/));\n}\nvar HttpStatusCode = /* @__PURE__ */ ((HttpStatusCode2) => {\n HttpStatusCode2[HttpStatusCode2[\"Ok\"] = 200] = \"Ok\";\n HttpStatusCode2[HttpStatusCode2[\"Created\"] = 201] = \"Created\";\n HttpStatusCode2[HttpStatusCode2[\"NoContent\"] = 204] = \"NoContent\";\n HttpStatusCode2[HttpStatusCode2[\"NotModified\"] = 304] = \"NotModified\";\n HttpStatusCode2[HttpStatusCode2[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpStatusCode2[HttpStatusCode2[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpStatusCode2[HttpStatusCode2[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpStatusCode2[HttpStatusCode2[\"NotFound\"] = 404] = \"NotFound\";\n HttpStatusCode2[HttpStatusCode2[\"ServerError\"] = 500] = \"ServerError\";\n HttpStatusCode2[HttpStatusCode2[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n return HttpStatusCode2;\n})(HttpStatusCode || {});\nfunction getFetchResponseFromAuraError(err2) {\n if (err2.data !== void 0 && err2.data.statusCode !== void 0) {\n let data = {};\n data = err2.data;\n if (err2.id !== void 0) {\n data.id = err2.id;\n }\n return new FetchResponse(data.statusCode, data);\n }\n return new FetchResponse(500, {\n error: err2.message\n });\n}\nasync function coerceResponseToFetchResponse(response) {\n const { status } = response;\n const responseHeaders = {};\n response.headers.forEach((value, key) => {\n responseHeaders[key] = value;\n });\n let responseBody = null;\n if (status !== 204) {\n const contentType = responseHeaders[\"content-type\"];\n responseBody = contentType && contentType.startsWith(\"application/json\") ? 
await response.json() : await response.text();\n }\n return new FetchResponse(status, responseBody, responseHeaders);\n}\nfunction getStatusText(status) {\n switch (status) {\n case 200:\n return \"OK\";\n case 201:\n return \"Created\";\n case 304:\n return \"Not Modified\";\n case 400:\n return \"Bad Request\";\n case 404:\n return \"Not Found\";\n case 500:\n return \"Server Error\";\n default:\n return `Unexpected HTTP Status Code: ${status}`;\n }\n}\nclass FetchResponse extends Error {\n constructor(status, body, headers) {\n super();\n this.status = status;\n this.body = body;\n this.headers = headers || {};\n this.ok = status >= 200 && this.status <= 299;\n this.statusText = getStatusText(status);\n }\n}\nconst deeplyFrozen = new WeakSetConstructor();\nfunction deepFreeze(value) {\n if (typeof value !== \"object\" || value === null || deeplyFrozen.has(value)) {\n return;\n }\n deeplyFrozen.add(value);\n if (isArray(value)) {\n for (let i = 0, len = value.length; i < len; i += 1) {\n deepFreeze(value[i]);\n }\n } else {\n const keys$1 = keys(value);\n for (let i = 0, len = keys$1.length; i < len; i += 1) {\n deepFreeze(value[keys$1[i]]);\n }\n }\n freeze(value);\n}\nfunction isScalar(value) {\n return typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\" || value === null || value === void 0;\n}\nfunction isScalarObject(value) {\n return Object.values(value).every((value2) => isScalar(value2));\n}\nfunction isScalarArray(value) {\n return value.every((item) => isScalar(item));\n}\nfunction encodeQueryParam(paramName, value, explode) {\n switch (typeof value) {\n case \"string\":\n return [`${paramName}=${encodeURIComponent(value)}`];\n case \"number\":\n case \"boolean\":\n return [`${paramName}=${value}`];\n case \"object\":\n if (value === null) {\n return [];\n }\n if (isArray(value)) {\n if (!isScalarArray(value)) {\n throw new Error(`Unsupported non-scalar array type for ${paramName}`);\n }\n if (explode) {\n return value.map(\n (item) => `${paramName}=${item ? encodeURIComponent(item) : item}`\n );\n }\n return [\n `${paramName}=${value.map((item) => item ? encodeURIComponent(item) : item).join(\",\")}`\n ];\n }\n if (!isScalarObject(value)) {\n throw new Error(`Unsupported non-scalar object type for ${paramName}`);\n }\n if (explode) {\n return entries(value).map(\n ([key, value2]) => `${key}=${value2 ? encodeURIComponent(value2) : value2}`\n );\n }\n return [\n `${paramName}=${entries(value).flat().map((item) => item ? 
encodeURIComponent(item) : item).join(\",\")}`\n ];\n default:\n return [];\n }\n}\nclass InternalError extends Error {\n constructor(data) {\n super();\n this.data = data;\n this.type = \"internal\";\n }\n}\nclass UserVisibleError extends Error {\n constructor(data) {\n super();\n this.data = data;\n this.type = \"user-visible\";\n }\n}\nfunction isUserVisibleError(error) {\n return error instanceof Error && \"type\" in error && error.type === \"user-visible\";\n}\nfunction logError(error) {\n if (isUserVisibleError(error)) {\n return;\n }\n console.error(\"OneStore Command threw an error that we did not expect\", error);\n}\nfunction applyDecorators(baseCommand, decorators, options) {\n if (!decorators || decorators.length === 0) {\n return baseCommand;\n }\n return decorators.reduce((command, decorator) => decorator(command, options), baseCommand);\n}\nexport {\n isArray as ArrayIsArray,\n indexOf as ArrayPrototypeIndexOf,\n push as ArrayPrototypePush,\n slice as ArrayPrototypeSlice,\n ConsoleLogger,\n DataIncompleteError,\n DataNotFoundError,\n DefaultFileParserLogger,\n Err,\n FetchResponse,\n HttpStatusCode,\n InternalError,\n parse as JSONParse,\n stringify as JSONStringify,\n LogLevelMap,\n create as ObjectCreate,\n entries as ObjectEntries,\n freeze as ObjectFreeze,\n keys as ObjectKeys,\n hasOwnProperty as ObjectPrototypeHasOwnProperty,\n Ok,\n UserVisibleError,\n WeakSetConstructor,\n addAllToSet,\n applyDecorators,\n bfs,\n buildSubscribableResult,\n coerceResponseToFetchResponse,\n deepCopy,\n deepEquals,\n deepFreeze,\n encodeQueryParam,\n err,\n getFetchResponseFromAuraError,\n includes,\n isCacheHitOrError,\n isCacheMiss,\n isDataIncompleteError,\n isDataNotFoundError,\n isPromiseLike,\n isResult,\n isSubscribable,\n isSubscribableResult,\n isUserVisibleError,\n lineFormatter,\n logError,\n loggerService,\n ok,\n racesync,\n readableStreamToAsyncIterable,\n rejectedPromiseLike,\n resolvedPromiseLike,\n satisfies,\n setDifference,\n setOverlaps,\n stableJSONStringify,\n stringIsVersion,\n toError,\n toTypeScriptSafeIdentifier,\n withResolvers\n};\n//# sourceMappingURL=index.js.map\n","/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\n/*!\n * Copyright (c) 2022, Salesforce, Inc.,\n * All rights reserved.\n * For full license text, see the LICENSE.txt file\n */\nconst { stringify, parse } = JSON;\nfunction deepCopy(x) {\n const stringified = stringify(x);\n return stringified ? parse(stringified) : void 0;\n}\nclass DefaultRecordableCache {\n constructor(baseCache) {\n this.baseCache = baseCache;\n this.keysRead = /* @__PURE__ */ new Set();\n this.missingKeysRead = /* @__PURE__ */ new Set();\n this.keysUpdated = /* @__PURE__ */ new Set();\n this.metadataKeysUpdated = /* @__PURE__ */ new Set();\n }\n delete(key) {\n this.keysUpdated.add(key);\n this.baseCache.delete(key);\n }\n get(key, options) {\n this.keysRead.add(key);\n const value = this.baseCache.get(key);\n if (value === void 0) {\n this.missingKeysRead.add(key);\n }\n if (options == null ? 
void 0 : options.copy) {\n return deepCopy(value);\n }\n return value;\n }\n set(key, value) {\n this.keysUpdated.add(key);\n this.metadataKeysUpdated.add(key);\n this.baseCache.set(key, value);\n }\n setMetadata(key, cacheControlMetadata) {\n this.metadataKeysUpdated.add(key);\n this.baseCache.setMetadata(key, cacheControlMetadata);\n }\n length() {\n return this.baseCache.length();\n }\n keys() {\n return this.baseCache.keys();\n }\n entries() {\n return this.baseCache.entries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass DefaultFilteredCache {\n constructor(baseCache, predicate) {\n this.baseCache = baseCache;\n this.predicate = predicate;\n }\n delete(key) {\n this.baseCache.delete(key);\n }\n get(key, options) {\n const result = this.baseCache.get(key);\n if (result && this.predicate(key, result)) {\n if (options == null ? void 0 : options.copy) {\n return deepCopy(result);\n }\n return result;\n }\n return void 0;\n }\n set(key, value) {\n this.baseCache.set(key, value);\n }\n setMetadata(key, cacheControlMetadata) {\n this.baseCache.setMetadata(key, cacheControlMetadata);\n }\n length() {\n return this.getFilteredKeys().size;\n }\n keys() {\n return this.getFilteredKeys();\n }\n entries() {\n return this.getFilteredEntries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n getFilteredEntries() {\n return this.baseCache.entries().filter(([key, _value]) => {\n return this.get(key);\n });\n }\n getFilteredKeys() {\n const filteredKeySet = /* @__PURE__ */ new Set();\n this.baseCache.keys().forEach((key) => {\n if (this.get(key)) {\n filteredKeySet.add(key);\n }\n });\n return filteredKeySet;\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass FixedTimeWritableCache {\n constructor(baseCache, generatedTime) {\n this.baseCache = baseCache;\n this.generatedTime = generatedTime;\n }\n delete(key) {\n this.baseCache.delete(key);\n }\n get(key, options) {\n return this.baseCache.get(key, options);\n }\n set(key, value) {\n this.baseCache.set(key, {\n ...value,\n metadata: {\n ...value.metadata,\n cacheControl: { ...value.metadata.cacheControl, generatedTime: this.generatedTime }\n }\n });\n }\n setMetadata(key, cacheControlMetadata) {\n this.baseCache.setMetadata(key, {\n ...cacheControlMetadata,\n generatedTime: this.generatedTime\n });\n }\n length() {\n return this.baseCache.length();\n }\n keys() {\n return this.baseCache.keys();\n }\n entries() {\n return this.baseCache.entries();\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass DefaultCache {\n constructor() {\n this.data = {};\n }\n get(key, options) {\n if (options == null ? 
void 0 : options.copy) {\n return deepCopy(this.data[key]);\n }\n return this.data[key];\n }\n /**\n * Adds the specified key/value to the cache.\n *\n * @param key key at which to store value\n * @param entry value to be stored\n */\n set(key, entry) {\n if (entry.metadata.cacheControl.type === \"no-store\") {\n return;\n }\n this.data[key] = {\n ...entry,\n metadata: {\n ...entry.metadata,\n type: entry.metadata.type || {\n namespace: \"OneStore:Internal\",\n name: \"UnknownType\"\n },\n cacheControl: {\n generatedTime: Date.now() / 1e3,\n ...entry.metadata.cacheControl\n }\n }\n };\n }\n /**\n * Removes the cache entry associated with the specified key.\n *\n * @param key key to be removed from the store\n */\n delete(key) {\n delete this.data[key];\n }\n /**\n * Sets the metadata for the specified key if the key is in cache.\n * If the key doesn't exist, it does nothing.\n *\n * @param key key at which to store metadata\n * @param cacheControlMetadata metadata to be stored\n */\n setMetadata(key, cacheControlMetadata) {\n if (key in this.data) {\n this.data[key].metadata.cacheControl = {\n generatedTime: Date.now() / 1e3,\n ...cacheControlMetadata\n };\n }\n }\n length() {\n return this.keys().size;\n }\n keys() {\n return new Set(Object.keys(this.data));\n }\n entries() {\n return Object.entries(this.data);\n }\n record() {\n return new DefaultRecordableCache(this);\n }\n filter(predicate) {\n return new DefaultFilteredCache(this, predicate);\n }\n buildFixedTimeWritableCache(generatedTime) {\n return new FixedTimeWritableCache(this, generatedTime);\n }\n}\nclass ClearableCache extends DefaultCache {\n clear() {\n this.data = {};\n }\n}\nfunction buildServiceDescriptor() {\n return {\n type: \"cache\",\n version: \"1.0\",\n service: new DefaultCache()\n };\n}\nexport {\n ClearableCache,\n buildServiceDescriptor\n};\n//# sourceMappingURL=index.js.map\n","import {\n resolvedPromiseLike,\n type SyncOrAsync,\n type Result,\n setDifference,\n} from '@conduit-client/utils';\nimport { CacheInclusionPolicyService } from './cache-inclusion-policy';\nimport type { Cache, Key } from '@conduit-client/service-cache/v1';\nimport { buildServiceDescriptor as buildCacheServiceDescriptor } from '@conduit-client/service-cache/v1';\n\n/**\n * Implementation of CacheInclusionPolicy that uses an inclusive\n * L2 durable cache as a second level behind an in memory, synchronous,\n * L1 cache.\n */\nexport abstract class DurableCacheInclusionPolicy extends CacheInclusionPolicyService {\n /**\n * Reads data out of a 2 level inclusive store.\n */\n read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n alreadyRevivedKeys?: Set<Key>;\n }): SyncOrAsync<D> {\n const { l1, readFromL1, alreadyRevivedKeys } = options;\n // eavesdrop on which keys read() accesses\n const recordableCache = l1.record();\n\n // let read() try to get the result\n return readFromL1(recordableCache).then((readResult) => {\n if (readResult.isOk()) {\n return readResult;\n }\n\n const keysToRevive = recordableCache.keysRead;\n\n // if we have already tried to revive all the keys that were read and still can't build then\n // don't go through another revive cycle.\n if (alreadyRevivedKeys && setDifference(keysToRevive, alreadyRevivedKeys).size === 0) {\n return readResult;\n }\n\n // revive missing keys\n return this.revive(keysToRevive, l1).then((revivedKeys) => {\n // if we couldn't revive all the keys then just return the earlier result\n if (setDifference(keysToRevive, revivedKeys).size 
> 0) {\n return readResult;\n }\n\n // revive found everything we requested, retry the read\n return this.read({ l1, readFromL1, alreadyRevivedKeys: revivedKeys });\n });\n });\n }\n\n /**\n * Writes data to a 2 level inclusive store.\n */\n write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D> {\n const { l1, writeToL1 } = options;\n // eavesdrop on which keys write() accesses\n const tempL1 = buildCacheServiceDescriptor().service.record();\n\n // Write to a temp store to see what keys are ingested\n return writeToL1(tempL1).then(() => {\n // Try to read those keys from the existing store to see what keys are missing and need to be revived\n const keysToReviveRecordableCache = l1.record();\n\n tempL1.keysUpdated.forEach((key: Key) => keysToReviveRecordableCache.get(key));\n tempL1.missingKeysRead.forEach((key: Key) => keysToReviveRecordableCache.get(key));\n // Revive missing keys\n const missingKeys = keysToReviveRecordableCache.missingKeysRead;\n\n const revivePromiseLike =\n missingKeys.size > 0\n ? this.revive(missingKeys, l1).then(() => undefined)\n : resolvedPromiseLike(undefined);\n\n return revivePromiseLike.then(() => {\n // Do real write\n const recordableCache = l1.record();\n\n return writeToL1(recordableCache).then((result) => {\n this.syncToL2Cache(recordableCache.keysUpdated, l1);\n // don't let durable sync issues cause the write to fail\n return result;\n });\n });\n });\n }\n\n /**\n * Revive a set of keys into the L1 cache from another cache level. The\n * cache inclusion policy will use this when any data is missing from the L1 cache\n * in order to try to find data that is requested and might be in other cache levels.\n *\n * @param keys The set of keys to be revived\n * @param l1 The L1 Cache instance to revive the data into\n */\n abstract revive(keys: Set<Key>, l1: Cache): SyncOrAsync<Set<Key>>;\n\n /**\n * Synchronize the data from an L1 cache into the persistent levels of your cache\n * by reading the keys present in keys from the l1Cache, and populating them in the\n * durable cache.\n *\n * @param changedKeys The set of keys to synchronize to the durable cache\n * @param l1 The L1 cache to synchronize the data from.\n */\n abstract syncToL2Cache(changedKeys: Set<Key>, l1: Cache): SyncOrAsync<void>;\n}\n","import type {\n CacheControlMetadata,\n CacheEntry,\n Key,\n FilterPredicate,\n} from '@conduit-client/service-cache/v1';\n\ninterface LogicalOperators<T> {\n $and?: T[];\n $or?: T[];\n $not?: T;\n}\n\nexport type CacheKeyQueryOperands = { $regex: RegExp };\n\nexport type CacheMetadataQueryOperands =\n | LogicalOperators<CacheMetadataQueryOperands>\n | { cacheControlType: { $eq: 'max-age' | 'stale-while-revalidate' | 'no-cache' | 'no-store' } }\n | { maxAge: { $gte?: number; $lte?: number } };\n\nexport type CacheValueQueryOperands = never; // TODO: Implement value-based querying\n\nexport type CacheQuery =\n | LogicalOperators<CacheQuery>\n | { key: CacheKeyQueryOperands }\n | { metadata: CacheMetadataQueryOperands }\n | { value: CacheValueQueryOperands };\n\nexport const isAndQuery = (query: CacheQuery): query is { $and: CacheQuery[] } => '$and' in query;\n\nexport const isOrQuery = (query: CacheQuery): query is { $or: CacheQuery[] } => '$or' in query;\n\nexport const isNotQuery = (query: CacheQuery): query is { $not: CacheQuery } => '$not' in query;\n\nexport const matchesMetadata = (\n metadataQuery: CacheMetadataQueryOperands,\n cacheControl: CacheControlMetadata\n): boolean => {\n 
if (\n 'cacheControlType' in metadataQuery &&\n cacheControl.type !== metadataQuery.cacheControlType.$eq\n ) {\n return false;\n }\n if ('maxAge' in metadataQuery && cacheControl.type === 'max-age') {\n const maxAge = cacheControl.maxAge ?? 0;\n if (\n (metadataQuery.maxAge.$gte !== undefined && maxAge < metadataQuery.maxAge.$gte) ||\n (metadataQuery.maxAge.$lte !== undefined && maxAge > metadataQuery.maxAge.$lte)\n ) {\n return false;\n }\n }\n return true;\n};\nexport function queryToPredicate(query: CacheQuery): FilterPredicate {\n return (key: Key, entry: CacheEntry<unknown>): boolean => {\n if (!query) return false;\n\n if (isAndQuery(query))\n return query.$and.every((subQuery) => queryToPredicate(subQuery)(key, entry));\n if (isOrQuery(query))\n return query.$or.some((subQuery) => queryToPredicate(subQuery)(key, entry));\n if (isNotQuery(query)) return !queryToPredicate(query.$not)(key, entry);\n\n if ('key' in query) return matchesKey(query.key, key);\n if ('metadata' in query)\n return matchesMetadata(query.metadata, entry.metadata.cacheControl);\n\n if ('value' in query) return false; // TODO: Not implemented\n\n throw new Error('Unknown Query Operation');\n };\n}\nfunction matchesKey(keyQuery: CacheKeyQueryOperands, key: Key): boolean {\n if ('$regex' in keyQuery) {\n return keyQuery.$regex.test(key);\n }\n return false;\n}\n","import type { CacheControlMetadata, CacheEntry } from '@conduit-client/service-cache/v1';\n\nexport type CacheUpdate = {\n type: InvalidateOperation;\n};\nexport type InvalidateOperation = 'invalidate';\n\n/**\n * Processes a cache update operation and determines the appropriate modification.\n *\n * This function analyzes the provided `update` and the `existing` cache entry\n * to determine the necessary update type. It returns one of three possible outcomes:\n *\n * - `{ type: 'entry', entry }`: A full cache entry update, including both value and metadata.\n * - `{ type: 'metadata', metadata }`: A metadata-only update, leaving the value unchanged.\n * - `{ type: 'no-op' }`: No changes are needed, and the cache should remain as is.\n *\n * @param update - The cache update operation to apply.\n * @param existing - The existing cache entry being modified.\n * @returns An object indicating the type of update:\n * - A full cache entry update (`type: 'entry'`)\n * - A metadata-only update (`type: 'metadata'`)\n * - A no-op (`type: 'no-op'`) if no changes are required.\n */\nexport function buildUpdate(\n update: CacheUpdate,\n existing: CacheEntry<unknown>\n):\n | { type: 'entry'; entry: CacheEntry<unknown> }\n | { type: 'metadata'; metadata: CacheControlMetadata }\n | { type: 'no-op' } {\n switch (update.type) {\n case 'invalidate':\n const updatedCacheControl = buildInvalidatedCacheControl(\n existing.metadata.cacheControl\n );\n return updatedCacheControl !== undefined\n ? 
{ type: 'metadata', metadata: updatedCacheControl }\n : { type: 'no-op' };\n\n default:\n throw new Error(`Invalid update operation: ${update.type}`);\n }\n}\n\n/**\n * Builds an updated CacheControlMetadata object that invalidates the cache entry.\n *\n * @param existingCacheControl - The current CacheControlMetadata.\n * @returns A new CacheControlMetadata object with `maxAge` set to `0`, or undefined if no changes are needed.\n */\nfunction buildInvalidatedCacheControl(\n existingCacheControl: CacheControlMetadata\n): CacheControlMetadata | undefined {\n switch (existingCacheControl.type) {\n case 'max-age':\n case 'stale-while-revalidate':\n if (existingCacheControl.maxAge !== 0) {\n return {\n ...existingCacheControl,\n maxAge: 0,\n };\n }\n case 'no-cache':\n case 'no-store':\n }\n\n return undefined; // No-op: no changes\n}\n","import { DeepReadonly, ok, resolvedPromiseLike, Result, SyncOrAsync } from '@conduit-client/utils';\nimport { CacheInclusionPolicyService } from './cache-inclusion-policy';\nimport type { CacheEntry, Key, NamedCacheService, Cache } from '@conduit-client/service-cache/v1';\nimport type { CacheInclusionPolicyServiceDescriptor } from './cache-inclusion-policy';\nimport { CacheQuery, queryToPredicate } from './cache-query';\nimport { buildUpdate, CacheUpdate } from './cache-update';\n\n/**\n * Implementation of CacheInclusionPolicy that uses a single level, in memory,\n * synchronous L1 cache.\n */\nexport class InMemoryCacheInclusionPolicy extends CacheInclusionPolicyService {\n constructor(protected services: NamedCacheService) {\n super();\n }\n\n /**\n * Reads data out of a single level in memory store.\n */\n read<D extends Result<void, unknown>>(options: {\n l1: Cache;\n readFromL1: (l1: Cache) => SyncOrAsync<D>;\n }): SyncOrAsync<D> {\n const { l1, readFromL1 } = options;\n // l1 is all we've got\n return readFromL1(l1);\n }\n\n /**\n * Writes data to a single level in memory store.\n */\n write<D extends Result<void, unknown>>(options: {\n l1: Cache;\n writeToL1: (l1: Cache) => SyncOrAsync<D>;\n }) {\n const { l1, writeToL1 } = options;\n return writeToL1(l1);\n }\n\n /**\n * Finds cache entries that match the given query.\n * Returns an async generator that yields `[key, entry]`.\n */\n async *find(\n query: CacheQuery\n ): AsyncGenerator<[key: Key, value: DeepReadonly<CacheEntry<unknown>>], void, unknown> {\n const cache = this.services.cache;\n const predicate = queryToPredicate(query);\n\n const filteredEntries = cache.filter(predicate).entries();\n for (const entry of filteredEntries) {\n yield entry;\n }\n }\n\n /**\n * Finds and modifies cache entries that match the given query.\n * Extends `find(query)` and returns an async generator of modified keys.\n */\n async *findAndModify(\n query: CacheQuery,\n cacheUpdate: CacheUpdate\n ): AsyncGenerator<Key, void, unknown> {\n const cache = this.services.cache;\n for await (const [key, value] of this.find(query)) {\n const update = buildUpdate(cacheUpdate, value);\n switch (update.type) {\n case 'entry':\n this.write({\n l1: cache,\n writeToL1: (l1) => resolvedPromiseLike(ok(l1.set(key, update.entry))),\n });\n yield key;\n break;\n case 'metadata':\n this.write({\n l1: cache,\n writeToL1: (l1) =>\n resolvedPromiseLike(ok(l1.setMetadata(key, update.metadata))),\n });\n yield key;\n break;\n case 'no-op':\n }\n }\n }\n}\n\n/**\n * Constructs an in-memory-only CacheInclusionPolicy.\n *\n * @returns in-memory-only CacheInclusionPolicy\n */\nexport function buildInMemoryCacheInclusionPolicyService(\n cache: 
Cache\n): CacheInclusionPolicyServiceDescriptor {\n return {\n service: new InMemoryCacheInclusionPolicy({ cache }),\n type: 'cacheInclusionPolicy',\n version: '1.0',\n };\n}\n"],"names":["buildCacheServiceDescriptor"],"mappings":";;;;;AAYO,MAAe,4BAA4B;AA2ClD;ACvDA;AAAA;AAAA;AAAA;AAAA;AAwHA,MAAM,GAAG;AAAA,EACP,YAAY,OAAO;AACjB,SAAK,QAAQ;AAAA,EACf;AAAA,EACA,OAAO;AACL,WAAO;AAAA,EACT;AAAA,EACA,QAAQ;AACN,WAAO,CAAC,KAAK,KAAI;AAAA,EACnB;AACF;AAYA,MAAM,KAAK,CAAC,UAAU,IAAI,GAAG,KAAK;AAyClC,SAAS,cAAc,MAAM,MAAM;AACjC,QAAM,gBAAgC,oBAAI,IAAG;AAC7C,aAAW,WAAW,MAAM;AAC1B,QAAI,CAAC,KAAK,IAAI,OAAO,GAAG;AACtB,oBAAc,IAAI,OAAO;AAAA,IAC3B;AAAA,EACF;AACA,SAAO;AACT;AAuBA,SAAS,oBAAoB,QAAQ;AACnC,MAAI,cAAc,MAAM,GAAG;AACzB,WAAO,OAAO,KAAK,CAAC,eAAe,UAAU;AAAA,EAC/C;AACA,SAAO;AAAA,IACL,MAAM,CAAC,aAAa,gBAAgB;AAClC,UAAI;AACF,eAAO,oBAAoB,YAAY,MAAM,CAAC;AAAA,MAChD,SAAS,GAAG;AACV,YAAI,gBAAgB,QAAQ;AAC1B,iBAAO,oBAAoB,MAAM;AAAA,QACnC;AACA,eAAO,oBAAoB,CAAC;AAAA,MAC9B;AAAA,IACF;AAAA,EACJ;AACA;AACA,SAAS,oBAAoB,QAAQ;AACnC,MAAI,cAAc,MAAM,GAAG;AACzB,WAAO,OAAO,KAAK,CAAC,eAAe,UAAU;AAAA,EAC/C;AACA,SAAO;AAAA,IACL,MAAM,CAAC,cAAc,eAAe;AAClC,UAAI,OAAO,eAAe,YAAY;AACpC,YAAI;AACF,iBAAO,oBAAoB,WAAW,MAAM,CAAC;AAAA,QAC/C,SAAS,GAAG;AACV,iBAAO,oBAAoB,CAAC;AAAA,QAC9B;AAAA,MACF;AACA,aAAO,oBAAoB,MAAM;AAAA,IACnC;AAAA,EACJ;AACA;AACA,SAAS,cAAc,GAAG;AACxB,SAAO,QAAQ,KAAK,OAAO,SAAS,EAAE,UAAU;AAClD;AC1PA;AAAA;AAAA;AAAA;AAAA;AAKA;AAAA;AAAA;AAAA;AAAA;AAKA,MAAM,EAAE,WAAW,MAAK,IAAK;AAC7B,SAAS,SAAS,GAAG;AACnB,QAAM,cAAc,UAAU,CAAC;AAC/B,SAAO,cAAc,MAAM,WAAW,IAAI;AAC5C;AACA,MAAM,uBAAuB;AAAA,EAC3B,YAAY,WAAW;AACrB,SAAK,YAAY;AACjB,SAAK,WAA2B,oBAAI,IAAG;AACvC,SAAK,kBAAkC,oBAAI,IAAG;AAC9C,SAAK,cAA8B,oBAAI,IAAG;AAC1C,SAAK,sBAAsC,oBAAI,IAAG;AAAA,EACpD;AAAA,EACA,OAAO,KAAK;AACV,SAAK,YAAY,IAAI,GAAG;AACxB,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,SAAK,SAAS,IAAI,GAAG;AACrB,UAAM,QAAQ,KAAK,UAAU,IAAI,GAAG;AACpC,QAAI,UAAU,QAAQ;AACpB,WAAK,gBAAgB,IAAI,GAAG;AAAA,IAC9B;AACA,QAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,aAAO,SAAS,KAAK;AAAA,IACvB;AACA,WAAO;AAAA,EACT;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,YAAY,IAAI,GAAG;AACxB,SAAK,oBAAoB,IAAI,GAAG;AAChC,SAAK,UAAU,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,oBAAoB,IAAI,GAAG;AAChC,SAAK,UAAU,YAAY,KAAK,oBAAoB;AAAA,EACtD;AAAA,EACA,SAAS;AACP,WAAO,KAAK,UAAU,OAAM;AAAA,EAC9B;AAAA,EACA,OAAO;AACL,WAAO,KAAK,UAAU,KAAI;AAAA,EAC5B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,UAAU,QAAO;AAAA,EAC/B;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,qBAAqB;AAAA,EACzB,YAAY,WAAW,WAAW;AAChC,SAAK,YAAY;AACjB,SAAK,YAAY;AAAA,EACnB;AAAA,EACA,OAAO,KAAK;AACV,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,UAAM,SAAS,KAAK,UAAU,IAAI,GAAG;AACrC,QAAI,UAAU,KAAK,UAAU,KAAK,MAAM,GAAG;AACzC,UAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,eAAO,SAAS,MAAM;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,UAAU,IAAI,KAAK,KAAK;AAAA,EAC/B;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,UAAU,YAAY,KAAK,oBAAoB;AAAA,EACtD;AAAA,EACA,SAAS;AACP,WAAO,KAAK,gBAAe,EAAG;AAAA,EAChC;AAAA,EACA,OAAO;AACL,WAAO,KAAK,gBAAe;AAAA,EAC7B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,mBAAkB;AAAA,EAChC;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,qBAAqB;AACnB,WAAO,KAAK,UAAU,QAAO,EAAG,OAAO,CAAC,CAAC,KAAK,MAAM,MAAM;AACxD,aAAO,KAAK,IAAI,GAAG;AAAA,IACrB,CAAC;AAAA,EACH;AAAA,EACA,kBAAkB;AAChB,UAAM,iBAAiC,oBAAI,IAAG;AAC9C,SAAK,UAAU,KAAI,EAAG,QAAQ,CAAC,QAAQ;AACrC,UAAI,KA
AK,IAAI,GAAG,GAAG;AACjB,uBAAe,IAAI,GAAG;AAAA,MACxB;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,uBAAuB;AAAA,EAC3B,YAAY,WAAW,eAAe;AACpC,SAAK,YAAY;AACjB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EACA,OAAO,KAAK;AACV,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,WAAO,KAAK,UAAU,IAAI,KAAK,OAAO;AAAA,EACxC;AAAA,EACA,IAAI,KAAK,OAAO;AACd,SAAK,UAAU,IAAI,KAAK;AAAA,MACtB,GAAG;AAAA,MACH,UAAU;AAAA,QACR,GAAG,MAAM;AAAA,QACT,cAAc,EAAE,GAAG,MAAM,SAAS,cAAc,eAAe,KAAK,cAAa;AAAA,MACzF;AAAA,IACA,CAAK;AAAA,EACH;AAAA,EACA,YAAY,KAAK,sBAAsB;AACrC,SAAK,UAAU,YAAY,KAAK;AAAA,MAC9B,GAAG;AAAA,MACH,eAAe,KAAK;AAAA,IAC1B,CAAK;AAAA,EACH;AAAA,EACA,SAAS;AACP,WAAO,KAAK,UAAU,OAAM;AAAA,EAC9B;AAAA,EACA,OAAO;AACL,WAAO,KAAK,UAAU,KAAI;AAAA,EAC5B;AAAA,EACA,UAAU;AACR,WAAO,KAAK,UAAU,QAAO;AAAA,EAC/B;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AACA,MAAM,aAAa;AAAA,EACjB,cAAc;AACZ,SAAK,OAAO,CAAA;AAAA,EACd;AAAA,EACA,IAAI,KAAK,SAAS;AAChB,QAAI,WAAW,OAAO,SAAS,QAAQ,MAAM;AAC3C,aAAO,SAAS,KAAK,KAAK,GAAG,CAAC;AAAA,IAChC;AACA,WAAO,KAAK,KAAK,GAAG;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,KAAK,OAAO;AACd,QAAI,MAAM,SAAS,aAAa,SAAS,YAAY;AACnD;AAAA,IACF;AACA,SAAK,KAAK,GAAG,IAAI;AAAA,MACf,GAAG;AAAA,MACH,UAAU;AAAA,QACR,GAAG,MAAM;AAAA,QACT,MAAM,MAAM,SAAS,QAAQ;AAAA,UAC3B,WAAW;AAAA,UACX,MAAM;AAAA,QAChB;AAAA,QACQ,cAAc;AAAA,UACZ,eAAe,KAAK,IAAG,IAAK;AAAA,UAC5B,GAAG,MAAM,SAAS;AAAA,QAC5B;AAAA,MACA;AAAA,IACA;AAAA,EACE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,KAAK;AACV,WAAO,KAAK,KAAK,GAAG;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,YAAY,KAAK,sBAAsB;AACrC,QAAI,OAAO,KAAK,MAAM;AACpB,WAAK,KAAK,GAAG,EAAE,SAAS,eAAe;AAAA,QACrC,eAAe,KAAK,IAAG,IAAK;AAAA,QAC5B,GAAG;AAAA,MACX;AAAA,IACI;AAAA,EACF;AAAA,EACA,SAAS;AACP,WAAO,KAAK,KAAI,EAAG;AAAA,EACrB;AAAA,EACA,OAAO;AACL,WAAO,IAAI,IAAI,OAAO,KAAK,KAAK,IAAI,CAAC;AAAA,EACvC;AAAA,EACA,UAAU;AACR,WAAO,OAAO,QAAQ,KAAK,IAAI;AAAA,EACjC;AAAA,EACA,SAAS;AACP,WAAO,IAAI,uBAAuB,IAAI;AAAA,EACxC;AAAA,EACA,OAAO,WAAW;AAChB,WAAO,IAAI,qBAAqB,MAAM,SAAS;AAAA,EACjD;AAAA,EACA,4BAA4B,eAAe;AACzC,WAAO,IAAI,uBAAuB,MAAM,aAAa;AAAA,EACvD;AACF;AAMA,SAAS,yBAAyB;AAChC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS,IAAI,aAAY;AAAA,EAC7B;AACA;ACjPO,MAAe,oCAAoC,4BAA4B;AAAA;AAAA;AAAA;AAAA,EAIlF,KAAsC,SAInB;AACf,UAAM,EAAE,IAAI,YAAY,mBAAA,IAAuB;AAE/C,UAAM,kBAAkB,GAAG,OAAA;AAG3B,WAAO,WAAW,eAAe,EAAE,KAAK,CAAC,eAAe;AACpD,UAAI,WAAW,QAAQ;AACnB,eAAO;AAAA,MACX;AAEA,YAAM,eAAe,gBAAgB;AAIrC,UAAI,sBAAsB,cAAc,cAAc,kBAAkB,EAAE,SAAS,GAAG;AAClF,eAAO;AAAA,MACX;AAGA,aAAO,KAAK,OAAO,cAAc,EAAE,EAAE,KAAK,CAAC,gBAAgB;AAEvD,YAAI,cAAc,cAAc,WAAW,EAAE,OAAO,GAAG;AACnD,iBAAO;AAAA,QACX;AAGA,eAAO,KAAK,KAAK,EAAE,IAAI,YAAY,oBAAoB,aAAa;AAAA,MACxE,CAAC;AAAA,IACL,CAAC;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAuC,SAGpB;AACf,UAAM,EAAE,IAAI,UAAA,IAAc;AAE1B,UAAM,SAASA,yBAA8B,QAAQ,OAAA;AAGrD,WAAO,UAAU,MAAM,EAAE,KAAK,MAAM;AAEhC,YAAM,8BAA8B,GAAG,OAAA;AAEvC,aAAO,YAAY,QAAQ,CAAC,QAAa,4BAA4B,IAAI,GAAG,CAAC;AAC7E,aAAO,gBAAgB,QAAQ,CAAC,QAAa,4BAA4B,IAAI,GAAG,CAAC;AAEjF,YAAM,cAAc,4BAA4B;AAEhD,YAAM,oBACF,YAAY,OAAO,IACb,KAAK,OAAO,aAAa,EAAE,EAAE,KAAK,MAAM,MAAS,IACjD,oBAAoB,MAAS;AAEvC,aAAO,kBAAkB,KAAK,MAAM;AAEhC,cAAM,kBAAkB,GAAG,OAAA;AAE3B,eAAO,UAAU,eAAe,EAAE,KAAK,CAAC,WAAW;AAC/C,eAAK,cAAc,gBAAgB,aAAa,EAAE;AAElD,iBAAO;AAAA,QACX,CAAC;AAAA,MACL,CAAC;AAAA,IACL,CAAC;AAAA,EACL;AAqBJ;ACrFO,MAAM,aAAa,CAAC,UAAuD,UAAU;AAErF,MAAM,YAAY,CAAC,UAAsD,SAAS;AAElF,MAAM,aAAa,CAAC,UAAqD,UAAU;AAEnF,MAAM,kBAAkB,CAC3B,e
ACA,iBACU;AACV,MACI,sBAAsB,iBACtB,aAAa,SAAS,cAAc,iBAAiB,KACvD;AACE,WAAO;AAAA,EACX;AACA,MAAI,YAAY,iBAAiB,aAAa,SAAS,WAAW;AAC9D,UAAM,SAAS,aAAa,UAAU;AACtC,QACK,cAAc,OAAO,SAAS,UAAa,SAAS,cAAc,OAAO,QACzE,cAAc,OAAO,SAAS,UAAa,SAAS,cAAc,OAAO,MAC5E;AACE,aAAO;AAAA,IACX;AAAA,EACJ;AACA,SAAO;AACX;AACO,SAAS,iBAAiB,OAAoC;AACjE,SAAO,CAAC,KAAU,UAAwC;AACtD,QAAI,CAAC,MAAO,QAAO;AAEnB,QAAI,WAAW,KAAK;AAChB,aAAO,MAAM,KAAK,MAAM,CAAC,aAAa,iBAAiB,QAAQ,EAAE,KAAK,KAAK,CAAC;AAChF,QAAI,UAAU,KAAK;AACf,aAAO,MAAM,IAAI,KAAK,CAAC,aAAa,iBAAiB,QAAQ,EAAE,KAAK,KAAK,CAAC;AAC9E,QAAI,WAAW,KAAK,EAAG,QAAO,CAAC,iBAAiB,MAAM,IAAI,EAAE,KAAK,KAAK;AAEtE,QAAI,SAAS,MAAO,QAAO,WAAW,MAAM,KAAK,GAAG;AACpD,QAAI,cAAc;AACd,aAAO,gBAAgB,MAAM,UAAU,MAAM,SAAS,YAAY;AAEtE,QAAI,WAAW,MAAO,QAAO;AAE7B,UAAM,IAAI,MAAM,yBAAyB;AAAA,EAC7C;AACJ;AACA,SAAS,WAAW,UAAiC,KAAmB;AACpE,MAAI,YAAY,UAAU;AACtB,WAAO,SAAS,OAAO,KAAK,GAAG;AAAA,EACnC;AACA,SAAO;AACX;ACvDO,SAAS,YACZ,QACA,UAIoB;AACpB,UAAQ,OAAO,MAAA;AAAA,IACX,KAAK;AACD,YAAM,sBAAsB;AAAA,QACxB,SAAS,SAAS;AAAA,MAAA;AAEtB,aAAO,wBAAwB,SACzB,EAAE,MAAM,YAAY,UAAU,oBAAA,IAC9B,EAAE,MAAM,QAAA;AAAA,IAElB;AACI,YAAM,IAAI,MAAM,6BAA6B,OAAO,IAAI,EAAE;AAAA,EAAA;AAEtE;AAQA,SAAS,6BACL,sBACgC;AAChC,UAAQ,qBAAqB,MAAA;AAAA,IACzB,KAAK;AAAA,IACL,KAAK;AACD,UAAI,qBAAqB,WAAW,GAAG;AACnC,eAAO;AAAA,UACH,GAAG;AAAA,UACH,QAAQ;AAAA,QAAA;AAAA,MAEhB;AAAA,EAEC;AAGT,SAAO;AACX;ACzDO,MAAM,qCAAqC,4BAA4B;AAAA,EAC1E,YAAsB,UAA6B;AAC/C,UAAA;AADkB,SAAA,WAAA;AAAA,EAEtB;AAAA;AAAA;AAAA;AAAA,EAKA,KAAsC,SAGnB;AACf,UAAM,EAAE,IAAI,WAAA,IAAe;AAE3B,WAAO,WAAW,EAAE;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAuC,SAGpC;AACC,UAAM,EAAE,IAAI,UAAA,IAAc;AAC1B,WAAO,UAAU,EAAE;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,KACH,OACmF;AACnF,UAAM,QAAQ,KAAK,SAAS;AAC5B,UAAM,YAAY,iBAAiB,KAAK;AAExC,UAAM,kBAAkB,MAAM,OAAO,SAAS,EAAE,QAAA;AAChD,eAAW,SAAS,iBAAiB;AACjC,YAAM;AAAA,IACV;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cACH,OACA,aACkC;AAClC,UAAM,QAAQ,KAAK,SAAS;AAC5B,qBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,GAAG;AAC/C,YAAM,SAAS,YAAY,aAAa,KAAK;AAC7C,cAAQ,OAAO,MAAA;AAAA,QACX,KAAK;AACD,eAAK,MAAM;AAAA,YACP,IAAI;AAAA,YACJ,WAAW,CAAC,OAAO,oBAAoB,GAAG,GAAG,IAAI,KAAK,OAAO,KAAK,CAAC,CAAC;AAAA,UAAA,CACvE;AACD,gBAAM;AACN;AAAA,QACJ,KAAK;AACD,eAAK,MAAM;AAAA,YACP,IAAI;AAAA,YACJ,WAAW,CAAC,OACR,oBAAoB,GAAG,GAAG,YAAY,KAAK,OAAO,QAAQ,CAAC,CAAC;AAAA,UAAA,CACnE;AACD,gBAAM;AACN;AAAA,MACC;AAAA,IAEb;AAAA,EACJ;AACJ;AAOO,SAAS,yCACZ,OACqC;AACrC,SAAO;AAAA,IACH,SAAS,IAAI,6BAA6B,EAAE,OAAO;AAAA,IACnD,MAAM;AAAA,IACN,SAAS;AAAA,EAAA;AAEjB;"}
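For readers skimming the new v1 sources embedded in the map above, a minimal usage sketch follows: it builds the in-memory policy from a cache service descriptor and invalidates every entry whose key matches a pattern via findAndModify. The '@conduit-client/service-cache/v1' import appears in the sources themselves; the '/v1' subpath for this package and the 'Account::' key prefix are assumptions for illustration, not confirmed by this diff.

// Minimal sketch only -- the inclusion-policy subpath import and the key
// pattern are assumed, not taken from this diff.
import { buildServiceDescriptor } from '@conduit-client/service-cache/v1';
import { buildInMemoryCacheInclusionPolicyService } from '@conduit-client/service-cache-inclusion-policy/v1';

async function invalidateByPrefix(): Promise<void> {
  // DefaultCache instance provided by the cache service descriptor.
  const cache = buildServiceDescriptor().service;
  const { service: policy } = buildInMemoryCacheInclusionPolicyService(cache);

  // findAndModify() filters entries with queryToPredicate() and applies
  // buildUpdate(); for 'invalidate' it rewrites max-age /
  // stale-while-revalidate metadata to maxAge: 0 and yields each modified key.
  for await (const key of policy.findAndModify(
    { key: { $regex: /^Account::/ } }, // hypothetical key prefix
    { type: 'invalidate' }
  )) {
    console.log('invalidated', key);
  }
}

The metadata write goes through policy.write(), so an L2-backed policy such as DurableCacheInclusionPolicy would also sync the modified keys via syncToL2Cache.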
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@conduit-client/service-cache-inclusion-policy",
-  "version": "2.0.1",
+  "version": "3.0.0",
   "private": false,
   "description": "OneStore Cache Inclusion Policy Service definition",
   "repository": {
@@ -31,8 +31,8 @@
     "watch": "yarn build --watch"
   },
   "dependencies": {
-    "@conduit-client/service-cache": "2.0.1",
-    "@conduit-client/utils": "2.0.1"
+    "@conduit-client/service-cache": "3.0.0",
+    "@conduit-client/utils": "3.0.0"
   },
   "volta": {
     "extends": "../../../../package.json"