@mmstack/resource 19.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +111 -0
- package/fesm2022/mmstack-resource.mjs +1049 -0
- package/fesm2022/mmstack-resource.mjs.map +1 -0
- package/index.d.ts +1 -0
- package/lib/mutation-resource.d.ts +80 -0
- package/lib/public_api.d.ts +3 -0
- package/lib/query-resource.d.ts +92 -0
- package/lib/util/cache/cache.d.ts +177 -0
- package/lib/util/cache/cache.interceptor.d.ts +39 -0
- package/lib/util/cache/index.d.ts +2 -0
- package/lib/util/cache/public_api.d.ts +2 -0
- package/lib/util/circuit-breaker.d.ts +74 -0
- package/lib/util/dedupe.interceptor.d.ts +50 -0
- package/lib/util/equality.d.ts +3 -0
- package/lib/util/has-slow-connection.d.ts +1 -0
- package/lib/util/index.d.ts +9 -0
- package/lib/util/persist.d.ts +3 -0
- package/lib/util/public_api.d.ts +3 -0
- package/lib/util/refresh.d.ts +3 -0
- package/lib/util/retry-on-error.d.ts +6 -0
- package/lib/util/url-with-params.d.ts +2 -0
- package/package.json +42 -0
|
@@ -0,0 +1,1049 @@
|
|
|
1
|
+
import { computed, untracked, InjectionToken, inject, isDevMode, signal, effect, linkedSignal, ResourceStatus, DestroyRef } from '@angular/core';
|
|
2
|
+
import { takeUntilDestroyed, toObservable } from '@angular/core/rxjs-interop';
|
|
3
|
+
import { of, tap, map, finalize, shareReplay, interval, firstValueFrom, combineLatestWith, filter } from 'rxjs';
|
|
4
|
+
import { HttpContextToken, HttpContext, HttpResponse, HttpParams, httpResource, HttpClient } from '@angular/common/http';
|
|
5
|
+
import { mutable, toWritable } from '@mmstack/primitives';
|
|
6
|
+
import { v7 } from 'uuid';
|
|
7
|
+
import { keys, hash, entries } from '@mmstack/object';
|
|
8
|
+
|
|
9
|
+
// Millisecond helpers used as the default cache timings.
const ONE_DAY = 1000 * 60 * 60 * 24;
const ONE_HOUR = 1000 * 60 * 60;
// Default cleanup strategy: LRU eviction capped at 200 entries, evaluated hourly.
// NOTE(review): the Cache constructor's inline default uses maxSize 1000 — confirm
// which of the two values is the intended default.
const DEFAULT_CLEANUP_OPT = {
    type: 'lru',
    maxSize: 200,
    checkInterval: ONE_HOUR,
};
|
|
16
|
+
/**
 * A generic cache implementation that stores data with time-to-live (TTL) and stale-while-revalidate capabilities.
 *
 * @typeParam T - The type of data to be stored in the cache.
 */
class Cache {
    ttl;
    staleTime;
    // Reactive key -> entry map; `mutable` allows in-place Map mutation while still
    // notifying signal consumers.
    internal = mutable(new Map());
    cleanupOpt;
    /**
     * Creates a new `Cache` instance.
     *
     * @param ttl - The default Time To Live (TTL) for cache entries, in milliseconds. Defaults to one day.
     * @param staleTime - The default duration, in milliseconds, during which a cache entry is considered
     * stale but can still be used while revalidation occurs in the background. Defaults to 1 hour.
     * @param cleanupOpt - Options for configuring the cache cleanup strategy. Missing fields are
     * filled in from `DEFAULT_CLEANUP_OPT`.
     * @throws Error when the resolved `maxSize` is not greater than 0.
     */
    constructor(ttl = ONE_DAY, staleTime = ONE_HOUR, cleanupOpt = {
        type: 'lru',
        maxSize: 1000,
        checkInterval: ONE_HOUR,
    }) {
        this.ttl = ttl;
        this.staleTime = staleTime;
        // Merge with defaults so a partially-specified options object still yields a full config.
        this.cleanupOpt = {
            ...DEFAULT_CLEANUP_OPT,
            ...cleanupOpt,
        };
        if (this.cleanupOpt.maxSize <= 0)
            throw new Error('maxSize must be greater than 0');
        // Cleanup the cache regularly based on the provided options.
        // BUGFIX 1: read the interval from the merged `this.cleanupOpt` — the raw `cleanupOpt`
        // parameter may omit `checkInterval`, and `setInterval(fn, undefined)` fires continuously.
        // BUGFIX 2: capture `this` through a WeakRef; a strong capture would keep the cache
        // alive forever, so the FinalizationRegistry below could never fire.
        const weakSelf = new WeakRef(this);
        const cleanupInterval = setInterval(() => {
            weakSelf.deref()?.cleanup();
        }, this.cleanupOpt.checkInterval);
        const destroyId = v7();
        // Stop the interval if this object is garbage collected; the cache can be quite large
        // from a memory standpoint & we don't want all that floating garbage.
        const registry = new FinalizationRegistry((id) => {
            if (id === destroyId) {
                clearInterval(cleanupInterval);
            }
        });
        registry.register(this, destroyId);
    }
    /** @internal */
    getInternal(key) {
        const keySignal = computed(() => key());
        return computed(() => {
            const key = keySignal();
            if (!key)
                return null;
            const found = this.internal().get(key);
            const now = Date.now();
            // Expired entries are treated as missing.
            if (!found || found.expiresAt <= now)
                return null;
            // Intentional side effect: bump the LRU usage counter on read. The mutation does
            // not notify the signal graph, it only informs the cleanup pass.
            found.useCount++;
            return {
                ...found,
                isStale: found.stale <= now,
            };
        });
    }
    /**
     * Retrieves a cache entry without affecting its usage count (for LRU). This is primarily
     * for internal use or debugging.
     * @internal
     * @param key - The key of the entry to retrieve.
     * @returns The cache entry, or `null` if not found or expired.
     */
    getUntracked(key) {
        return untracked(this.getInternal(() => key));
    }
    /**
     * Retrieves a cache entry as a signal.
     *
     * @param key - A function that returns the cache key. The key is a signal, allowing for dynamic keys. If the function returns null the value is also null.
     * @returns A signal that holds the cache entry, or `null` if not found or expired. The signal
     * updates whenever the cache entry changes (e.g., due to revalidation or expiration).
     */
    get(key) {
        return this.getInternal(key);
    }
    /**
     * Stores a value in the cache.
     *
     * @param key - The key under which to store the value.
     * @param value - The value to store.
     * @param staleTime - (Optional) The stale time for this entry, in milliseconds. Overrides the default `staleTime`.
     * @param ttl - (Optional) The TTL for this entry, in milliseconds. Overrides the default `ttl`.
     */
    store(key, value, staleTime = this.staleTime, ttl = this.ttl) {
        const entry = this.getUntracked(key);
        if (entry) {
            clearTimeout(entry.timeout); // stop the pending invalidation of the replaced entry
        }
        const prevCount = entry?.useCount ?? 0;
        // ttl cannot be less than staleTime
        if (ttl < staleTime)
            staleTime = ttl;
        const now = Date.now();
        this.internal.mutate((map) => {
            map.set(key, {
                value,
                created: entry?.created ?? now, // preserve the original creation time on refresh
                useCount: prevCount + 1,
                stale: now + staleTime,
                expiresAt: now + ttl,
                // Self-invalidate once the TTL elapses.
                timeout: setTimeout(() => this.invalidate(key), ttl),
            });
            return map;
        });
    }
    /**
     * Invalidates (removes) a cache entry.
     *
     * @param key - The key of the entry to invalidate.
     */
    invalidate(key) {
        const entry = this.getUntracked(key);
        if (!entry)
            return;
        clearTimeout(entry.timeout);
        this.internal.mutate((map) => {
            map.delete(key);
            return map;
        });
    }
    /**
     * Evicts entries when the cache grows past `maxSize`, keeping roughly half of `maxSize`
     * — either the most used (lru) or the newest (fifo) entries.
     * @internal
     */
    cleanup() {
        if (untracked(this.internal).size <= this.cleanupOpt.maxSize)
            return;
        const sorted = Array.from(untracked(this.internal).entries()).toSorted((a, b) => {
            if (this.cleanupOpt.type === 'lru') {
                return a[1].useCount - b[1].useCount; // least used first
            }
            else {
                return a[1].created - b[1].created; // oldest first
            }
        });
        const keepCount = Math.floor(this.cleanupOpt.maxSize / 2);
        const removed = sorted.slice(0, sorted.length - keepCount);
        const keep = sorted.slice(removed.length, sorted.length);
        // Cancel pending invalidation timers for evicted entries so they don't fire later.
        removed.forEach(([, e]) => {
            clearTimeout(e.timeout);
        });
        this.internal.set(new Map(keep));
    }
}
|
|
165
|
+
// DI token under which the shared query Cache instance is provided (see provideQueryCache).
const CLIENT_CACHE_TOKEN = new InjectionToken('INTERNAL_CLIENT_CACHE');
|
|
166
|
+
/**
 * Provides the `Cache` instance used by `queryResource`. Call this in your application's
 * root configuration; it can also be overridden with component/module providers.
 *
 * @param options - Optional configuration options for the cache (`ttl`, `staleTime`, `cleanup`).
 * @returns An Angular `Provider` for the cache.
 *
 * @example
 * // In your app.config.ts or AppModule providers:
 *
 * import { provideQueryCache } from './your-cache';
 *
 * export const appConfig: ApplicationConfig = {
 *   providers: [
 *     provideQueryCache({
 *       ttl: 60000, // Default TTL of 60 seconds
 *       staleTime: 30000, // Default staleTime of 30 seconds
 *     }),
 *     // ... other providers
 *   ]
 * };
 */
function provideQueryCache(opt) {
    const cache = new Cache(opt?.ttl, opt?.staleTime, opt?.cleanup);
    return {
        provide: CLIENT_CACHE_TOKEN,
        useValue: cache,
    };
}
|
|
194
|
+
/**
 * A `Cache` whose `store` does nothing — used as a safe production fallback when no
 * cache provider was registered, so reads behave like a perpetually-empty cache.
 */
class NoopCache extends Cache {
    // Accepts and ignores all arguments. The original default parameters
    // (`super.staleTime` / `super.ttl`) always resolved to `undefined` anyway,
    // because instance fields live on the instance, not on `Cache.prototype`.
    store() {
        // noop
    }
}
|
|
199
|
+
/**
 * Injects the `QueryCache` instance that is used within queryResource.
 * Allows for direct modification of cached data, but is mostly meant for internal use.
 *
 * @param injector - (Optional) The injector to use. If not provided, the current
 * injection context is used.
 * @returns The `QueryCache` instance. In dev mode a missing provider throws; in
 * production a `NoopCache` is returned instead so the app keeps working uncached.
 *
 * @example
 * // In your component or service:
 *
 * import { injectQueryCache } from './your-cache';
 *
 * constructor() {
 *   const cache = injectQueryCache();
 *
 *   const myData = cache.get(() => 'my-data-key');
 *   if (myData() !== null) {
 *     // ... use cached data ...
 *   }
 * }
 */
function injectQueryCache(injector) {
    let cache;
    if (injector) {
        cache = injector.get(CLIENT_CACHE_TOKEN, null, {
            optional: true,
        });
    }
    else {
        cache = inject(CLIENT_CACHE_TOKEN, {
            optional: true,
        });
    }
    if (cache)
        return cache;
    if (isDevMode())
        throw new Error('Cache not provided, please add provideQueryCache() to providers array');
    return new NoopCache();
}
|
|
237
|
+
|
|
238
|
+
// HttpContext token carrying per-request cache configuration; caching is off by default.
const CACHE_CONTEXT = new HttpContextToken(() => ({
    cache: false,
}));
// Enables caching on the given context (creating one when absent), merging in any options.
function setCacheContext(ctx = new HttpContext(), opt) {
    const value = { ...opt, cache: true };
    return ctx.set(CACHE_CONTEXT, value);
}
// Reads the cache configuration (or the default) from a request context.
function getCacheContext(ctx) {
    return ctx.get(CACHE_CONTEXT);
}
|
|
247
|
+
/**
 * Parses the `Cache-Control` header of an HTTP response into a directive object.
 *
 * @param req - Any object exposing `headers.get('Cache-Control')` (an HttpResponse).
 * @returns `{ noStore, noCache, mustRevalidate, immutable, maxAge, staleWhileRevalidate }`
 * where `maxAge`/`staleWhileRevalidate` are integer seconds or `null` when absent.
 */
function parseCacheControlHeader(req) {
    const header = req.headers.get('Cache-Control');
    let sMaxAge = null;
    const directives = {
        noStore: false,
        noCache: false,
        mustRevalidate: false,
        immutable: false,
        maxAge: null,
        staleWhileRevalidate: null,
    };
    if (!header)
        return directives;
    const parts = header.split(',');
    for (const part of parts) {
        const [unparsedKey, value] = part.trim().split('=');
        const key = unparsedKey.trim().toLowerCase();
        switch (key) {
            case 'no-store':
                directives.noStore = true;
                break;
            case 'no-cache':
                directives.noCache = true;
                break;
            case 'must-revalidate':
            case 'proxy-revalidate':
                directives.mustRevalidate = true;
                break;
            case 'immutable':
                directives.immutable = true;
                break;
            case 'max-age': {
                if (!value)
                    break;
                const parsedValue = parseInt(value, 10);
                if (!isNaN(parsedValue))
                    directives.maxAge = parsedValue;
                break;
            }
            // BUGFIX: RFC 9111 spells the shared-cache directive `s-maxage`; the previous
            // `s-max-age` spelling is kept for backward compatibility.
            case 's-maxage':
            case 's-max-age': {
                if (!value)
                    break;
                const parsedValue = parseInt(value, 10);
                if (!isNaN(parsedValue))
                    sMaxAge = parsedValue;
                break;
            }
            case 'stale-while-revalidate': {
                if (!value)
                    break;
                const parsedValue = parseInt(value, 10);
                if (!isNaN(parsedValue))
                    directives.staleWhileRevalidate = parsedValue;
                break;
            }
        }
    }
    // s-maxage takes precedence over max-age
    if (sMaxAge !== null)
        directives.maxAge = sMaxAge;
    // if no-store is set nothing else is relevant
    if (directives.noStore)
        return {
            noStore: true,
            noCache: false,
            mustRevalidate: false,
            immutable: false,
            maxAge: null,
            staleWhileRevalidate: null,
        };
    // max-age does not apply to immutable resources
    if (directives.immutable)
        return {
            ...directives,
            maxAge: null,
        };
    return directives;
}
|
|
325
|
+
/**
 * Resolves the effective `staleTime`/`ttl` (milliseconds) for a cache entry from the
 * parsed `Cache-Control` directives and the caller-supplied defaults.
 *
 * @param cacheControl - Output of `parseCacheControlHeader` (seconds-based directives).
 * @param staleTime - Caller default stale time in milliseconds (may be undefined).
 * @param ttl - Caller default TTL in milliseconds (may be undefined).
 */
function resolveTimings(cacheControl, staleTime, ttl) {
    // immutable responses never go stale and never expire
    if (cacheControl.immutable)
        return {
            staleTime: Infinity,
            ttl: Infinity,
        };
    const timings = {
        staleTime,
        ttl,
    };
    // if no-cache (or must-revalidate) is set, we must always revalidate
    if (cacheControl.noCache || cacheControl.mustRevalidate)
        timings.staleTime = 0;
    if (cacheControl.maxAge !== null)
        timings.ttl = cacheControl.maxAge * 1000;
    // stale-while-revalidate caps how long stale data may be served before revalidation.
    // BUGFIX: the header value is in seconds while timings are in milliseconds; the previous
    // code first assigned the raw seconds value to staleTime, which then won the ms
    // comparison below and produced a stale window 1000x too short.
    if (cacheControl.staleWhileRevalidate !== null) {
        const ms = cacheControl.staleWhileRevalidate * 1000;
        if (timings.staleTime === undefined || timings.staleTime > ms)
            timings.staleTime = ms;
    }
    return timings;
}
|
|
350
|
+
/**
 * Creates an `HttpInterceptorFn` that implements caching for HTTP requests. This interceptor
 * checks for a caching configuration in the request's `HttpContext` (internally set by the queryResource).
 * If caching is enabled, it attempts to retrieve responses from the cache. If a cached response
 * is found and is not stale, it's returned directly. If the cached response is stale, it's returned,
 * and a background revalidation request is made. If no cached response is found, the request
 * is made to the server, and the response is cached according to the configured TTL and staleness.
 * The interceptor also respects `Cache-Control` headers from the server.
 *
 * @param allowedMethods - An array of HTTP methods for which caching should be enabled.
 *   Defaults to `['GET', 'HEAD', 'OPTIONS']`.
 *
 * @returns An `HttpInterceptorFn` that implements the caching logic.
 *
 * @example
 * // In your app.config.ts or module providers:
 *
 * import { provideHttpClient, withInterceptors } from '@angular/common/http';
 * import { createCacheInterceptor } from '@mmstack/resource';
 *
 * export const appConfig: ApplicationConfig = {
 *   providers: [
 *     provideHttpClient(withInterceptors([createCacheInterceptor()])),
 *     // ... other providers
 *   ],
 * };
 */
function createCacheInterceptor(allowedMethods = ['GET', 'HEAD', 'OPTIONS']) {
    const CACHE_METHODS = new Set(allowedMethods);
    return (req, next) => {
        // Resolved per-request; interceptor fns run in an injection context.
        const cache = injectQueryCache();
        if (!CACHE_METHODS.has(req.method))
            return next(req);
        const opt = getCacheContext(req.context);
        if (!opt.cache)
            return next(req);
        // Cache key defaults to the full URL (including query params) unless overridden.
        const key = opt.key ?? req.urlWithParams;
        const entry = cache.getUntracked(key); // null if expired or not found
        // If the entry is not stale, return it
        if (entry && !entry.isStale)
            return of(entry.value);
        // resource itself handles case of showing stale data...the request must process as this will "refresh said data"
        // Conditional-request headers: let the server answer 304 when our stale copy is still valid.
        const eTag = entry?.value.headers.get('ETag');
        const lastModified = entry?.value.headers.get('Last-Modified');
        if (eTag) {
            req = req.clone({ setHeaders: { 'If-None-Match': eTag } });
        }
        if (lastModified) {
            req = req.clone({ setHeaders: { 'If-Modified-Since': lastModified } });
        }
        return next(req).pipe(tap((event) => {
            // Store only successful full responses, honoring server Cache-Control directives.
            if (event instanceof HttpResponse && event.ok) {
                const cacheControl = parseCacheControlHeader(event);
                if (cacheControl.noStore)
                    return;
                const { staleTime, ttl } = resolveTimings(cacheControl, opt.staleTime, opt.ttl);
                cache.store(key, event, staleTime, ttl);
            }
        }), map((event) => {
            // handle 304 responses due to eTag/last-modified: substitute the cached body.
            if (event instanceof HttpResponse && event.status === 304 && entry) {
                return entry.value;
            }
            return event;
        }));
    };
}
|
|
417
|
+
|
|
418
|
+
/**
 * Creates a signal-based circuit breaker.
 *
 * NOTE(review): this library's naming is inverted relative to classical circuit-breaker
 * terminology — here 'CLOSED' means tripped/blocking (failureCount >= treshold) and
 * 'OPEN' means requests flow. This is consistent with `createNeverBrokenCircuitBreaker`,
 * whose `isClosed` is always false.
 *
 * @internal
 * @param treshold - Failure count at which the breaker trips. (sic — spelling kept, callers use it)
 * @param resetTimeout - Milliseconds after tripping before a half-open probe is allowed.
 */
function internalCeateCircuitBreaker(treshold = 5, resetTimeout = 30000) {
    const halfOpen = signal(false);
    const failureCount = signal(0);
    const status = computed(() => {
        if (failureCount() >= treshold)
            return 'CLOSED';
        return halfOpen() ? 'HALF_OPEN' : 'OPEN';
    });
    const isClosed = computed(() => status() === 'CLOSED');
    // A success fully resets the breaker.
    const success = () => {
        failureCount.set(0);
        halfOpen.set(false);
    };
    // Allow a single probe request: drop the count just below the treshold and mark half-open,
    // so one more failure re-trips immediately while one success resets.
    const tryOnce = () => {
        if (!untracked(isClosed))
            return;
        halfOpen.set(true);
        failureCount.set(treshold - 1);
    };
    // While tripped, schedule an automatic half-open probe after resetTimeout; the cleanup
    // cancels the pending timer whenever the status changes first.
    const effectRef = effect((cleanup) => {
        if (!isClosed())
            return;
        const timeout = setTimeout(tryOnce, resetTimeout);
        return cleanup(() => clearTimeout(timeout));
    });
    const fail = () => {
        failureCount.set(failureCount() + 1);
        halfOpen.set(false);
    };
    return {
        status,
        isClosed,
        fail,
        success,
        halfOpen: tryOnce,
        destroy: () => effectRef.destroy(),
    };
}
|
|
457
|
+
/**
 * A stub breaker used when circuit breaking is disabled: `isClosed` is always false
 * (never trips, in this library's inverted naming) and every state-transition hook is a no-op.
 * @internal
 */
function createNeverBrokenCircuitBreaker() {
    return {
        isClosed: computed(() => false),
        status: signal('OPEN'),
        fail: () => {
            // noop
        },
        success: () => {
            // noop
        },
        halfOpen: () => {
            // noop
        },
        destroy: () => {
            // noop
        },
    };
}
|
|
476
|
+
/**
 * Creates a circuit breaker instance.
 *
 * @param options - Configuration options for the circuit breaker. Can be:
 * - `false`: Creates a "no-op" circuit breaker that never trips.
 * - `undefined` or `true`: Creates a circuit breaker with default settings (threshold: 5, timeout: 30000ms).
 * - `CircuitBreaker`: Reuses an existing `CircuitBreaker` instance.
 * - `{ threshold?: number; timeout?: number; }`: Creates a circuit breaker with the specified threshold and timeout.
 *
 * @returns A `CircuitBreaker` instance.
 *
 * @example
 * // Create a circuit breaker with default settings:
 * const breaker = createCircuitBreaker();
 *
 * // Create a circuit breaker with custom settings:
 * const customBreaker = createCircuitBreaker({ threshold: 10, timeout: 60000 });
 *
 * // Share a single circuit breaker instance across multiple resources:
 * const sharedBreaker = createCircuitBreaker();
 * const resource1 = queryResource(..., { circuitBreaker: sharedBreaker });
 * const resource2 = mutationResource(..., { circuitBreaker: sharedBreaker });
 */
function createCircuitBreaker(opt) {
    if (opt === false)
        return createNeverBrokenCircuitBreaker();
    if (typeof opt === 'object' && 'isClosed' in opt)
        return opt;
    // BUGFIX: accept the documented `threshold` spelling while remaining backward
    // compatible with the originally-read (misspelled) `treshold` key.
    const threshold = opt?.threshold ?? opt?.treshold;
    return internalCeateCircuitBreaker(threshold, opt?.timeout);
}
|
|
506
|
+
|
|
507
|
+
// Heavily inspired by: https://dev.to/kasual1/request-deduplication-in-angular-3pd8
// Per-request opt-out flag for the dedupe interceptor; defaults to "dedupe allowed".
const NO_DEDUPE = new HttpContextToken(() => false);
/**
 * Disables request deduplication for a specific HTTP request.
 *
 * @param ctx - The `HttpContext` to modify. If not provided, a new `HttpContext` is created.
 * @returns The modified `HttpContext` with the `NO_DEDUPE` token set to `true`.
 *
 * @example
 * // Disable deduplication for a specific POST request:
 * const context = noDedupe();
 * this.http.post('/api/data', payload, { context }).subscribe(...);
 *
 * // Disable deduplication, modifying an existing context:
 * let context = new HttpContext();
 * context = noDedupe(context);
 * this.http.post('/api/data', payload, { context }).subscribe(...);
 */
function noDedupe(ctx = new HttpContext()) {
    const updated = ctx.set(NO_DEDUPE, true);
    return updated;
}
|
|
528
|
+
/**
 * Creates an `HttpInterceptorFn` that deduplicates identical HTTP requests.
 * If multiple identical requests (same method, URL and parameters) are made concurrently,
 * only the first request will be sent to the server. Subsequent requests will
 * receive the response from the first request.
 *
 * @param allowed - An array of HTTP methods for which deduplication should be enabled.
 *   Defaults to `['GET', 'DELETE', 'HEAD', 'OPTIONS']`.
 *
 * @returns An `HttpInterceptorFn` that implements the request deduplication logic.
 *
 * @example
 * // In your app.config.ts or module providers:
 * import { provideHttpClient, withInterceptors } from '@angular/common/http';
 * import { createDedupeRequestsInterceptor } from './your-dedupe-interceptor';
 *
 * export const appConfig: ApplicationConfig = {
 *   providers: [
 *     provideHttpClient(withInterceptors([createDedupeRequestsInterceptor()])),
 *     // ... other providers
 *   ],
 * };
 *
 * // You can also specify which methods should be deduped
 * export const appConfig: ApplicationConfig = {
 *   providers: [
 *     provideHttpClient(withInterceptors([createDedupeRequestsInterceptor(['GET'])])), // only dedupe GET calls
 *     // ... other providers
 *   ],
 * };
 */
function createDedupeRequestsInterceptor(allowed = ['GET', 'DELETE', 'HEAD', 'OPTIONS']) {
    const inFlight = new Map();
    const DEDUPE_METHODS = new Set(allowed);
    return (req, next) => {
        if (!DEDUPE_METHODS.has(req.method) || req.context.get(NO_DEDUPE))
            return next(req);
        // BUGFIX: include the HTTP method in the dedupe key — keyed by URL alone, a
        // concurrent GET and DELETE to the same URL would share a single response.
        const key = `${req.method} ${req.urlWithParams}`;
        const found = inFlight.get(key);
        if (found)
            return found;
        // Share the in-flight observable with all duplicate callers and drop it from the
        // map once the request settles.
        const request = next(req).pipe(finalize(() => inFlight.delete(key)), shareReplay());
        inFlight.set(key, request);
        return request;
    };
}
|
|
573
|
+
|
|
574
|
+
// Structural equality for Angular `transferCache` request options, which may be a
// boolean, an object with an optional `includeHeaders` list, or absent.
function equalTransferCache(a, b) {
    if (!a && !b)
        return true;
    if (!a || !b)
        return false;
    if (typeof a !== typeof b)
        return false;
    // Both truthy and same type: booleans compare directly.
    if (typeof a === 'boolean' || typeof b === 'boolean')
        return a === b;
    const aHeaders = a.includeHeaders;
    const bHeaders = b.includeHeaders;
    if (!aHeaders && !bHeaders)
        return true;
    if (!aHeaders || !bHeaders)
        return false;
    if (aHeaders.length !== bHeaders.length)
        return false;
    if (aHeaders.length === 0)
        return true;
    // Order-insensitive comparison of the header name lists.
    const lookup = new Set(aHeaders);
    return bHeaders.every((header) => lookup.has(header));
}
|
|
594
|
+
// Shallow equality of two params records (same keys, strictly-equal values).
function equalParams(a, b) {
    if (!a || !b)
        return !a && !b;
    const aKeys = keys(a);
    const bKeys = keys(b);
    return aKeys.length === bKeys.length && aKeys.every((k) => a[k] === b[k]);
}
|
|
605
|
+
// Equality of request bodies via structural hashing; falsy bodies are equal only
// when both are falsy.
function equalBody(a, b) {
    if (!a || !b)
        return !a && !b;
    return hash(a) === hash(b);
}
|
|
612
|
+
// Shallow equality of two header records (same keys, strictly-equal values).
function equalHeaders(a, b) {
    if (!a || !b)
        return !a && !b;
    const aKeys = keys(a);
    const bKeys = keys(b);
    return aKeys.length === bKeys.length && aKeys.every((k) => a[k] === b[k]);
}
|
|
623
|
+
// Shallow equality of two context records (same keys, strictly-equal values).
function equalContext(a, b) {
    if (!a || !b)
        return !a && !b;
    const aKeys = keys(a);
    const bKeys = keys(b);
    return aKeys.length === bKeys.length && aKeys.every((k) => a[k] === b[k]);
}
|
|
634
|
+
// Builds a request-equality predicate; `equalResult` optionally overrides how the
// request bodies are compared (defaults to hash-based `equalBody`).
function createEqualRequest(equalResult) {
    const bodyEqual = equalResult ?? equalBody;
    return (a, b) => {
        if (!a || !b)
            return !a && !b;
        return (a.url === b.url &&
            a.method === b.method &&
            equalParams(a.params, b.params) &&
            equalHeaders(a.headers, b.headers) &&
            bodyEqual(a.body, b.body) &&
            equalContext(a.context, b.context) &&
            a.withCredentials === b.withCredentials &&
            a.reportProgress === b.reportProgress &&
            equalTransferCache(a.transferCache, b.transferCache));
    };
}
|
|
662
|
+
|
|
663
|
+
/**
 * Detects a slow connection via the Network Information API, when available.
 * @returns `true` only when `navigator.connection.effectiveType` ends with '2g'
 * (i.e. '2g' or 'slow-2g'); `false` whenever the API or `window` is unavailable.
 */
function hasSlowConnection() {
    // BUGFIX: guard with `typeof` — in SSR/Node there is no global `window`, and the
    // previous bare `window` reference threw a ReferenceError.
    if (typeof window !== 'undefined' &&
        'navigator' in window &&
        'connection' in window.navigator &&
        typeof window.navigator.connection === 'object' &&
        !!window.navigator.connection &&
        'effectiveType' in window.navigator.connection &&
        typeof window.navigator.connection.effectiveType === 'string')
        return window.navigator.connection.effectiveType.endsWith('2g');
    return false;
}
|
|
674
|
+
|
|
675
|
+
// Wraps a signal so its last value is retained while `usePrevious` is true (e.g. while a
// resource is loading). Note: "presist" is a typo of "persist", kept because sibling code
// calls it by this name.
function presist(value, usePrevious, equal) {
    // linkedSignal allows us to access previous source value
    const persisted = linkedSignal({
        source: () => ({
            value: value(),
            usePrevious: usePrevious(),
        }),
        computation: (source, prev) => {
            // While the hold flag is up and a previous value exists, keep showing it
            // instead of the freshly-emitted source value.
            if (source.usePrevious && prev)
                return prev.value;
            return source.value;
        },
        equal,
    });
    // if original value was WritableSignal then override linkedSignal methods to original...angular uses linkedSignal under the hood in ResourceImpl, this applies to that.
    if ('set' in value) {
        persisted.set = value.set;
        persisted.update = value.update;
        persisted.asReadonly = value.asReadonly;
    }
    return persisted;
}
|
|
697
|
+
// Optionally wraps a resource so statusCode/headers/value keep their previous values
// while a reload is in flight (and, for `value`, while no cached value exists yet).
function persistResourceValues(resource, hasCachedValue, persist = false, equal) {
    if (!persist)
        return resource;
    // should show cached value if available
    const holdValue = computed(() => resource.isLoading() || !hasCachedValue());
    return {
        ...resource,
        statusCode: presist(resource.statusCode, resource.isLoading),
        headers: presist(resource.headers, resource.isLoading),
        value: presist(resource.value, holdValue, equal),
    };
}
|
|
708
|
+
|
|
709
|
+
// Refresh the resource every n milliseconds, or don't refresh if undefined is provided.
// 0 is also excluded, due to it not being a valid use case.
function refresh(resource, destroyRef, refresh) {
    if (!refresh)
        return resource; // no refresh requested
    // we can use RxJs here as reloading the resource will always be a side effect & as such does not impact the reactive graph in any way.
    let sub = interval(refresh)
        .pipe(takeUntilDestroyed(destroyRef))
        .subscribe(() => resource.reload());
    const reload = () => {
        sub.unsubscribe(); // do not conflict with manual reload
        const hasReloaded = resource.reload();
        // resubscribe after manual reload — this restarts the interval from zero, so the
        // next automatic refresh is a full period after the manual one
        sub = interval(refresh)
            .pipe(takeUntilDestroyed(destroyRef))
            .subscribe(() => resource.reload());
        return hasReloaded;
    };
    return {
        ...resource,
        reload,
        destroy: () => {
            // stop the interval before tearing down the underlying resource
            sub.unsubscribe();
            resource.destroy();
        },
    };
}
|
|
735
|
+
|
|
736
|
+
// Retry on error: if a number is provided it will retry that many times with exponential
// backoff, otherwise it will use the options provided ({ max, backoff }).
function retryOnError(res, opt) {
    const max = opt ? (typeof opt === 'number' ? opt : (opt.max ?? 0)) : 0;
    const backoff = typeof opt === 'object' ? (opt.backoff ?? 1000) : 1000;
    let retries = 0;
    let timeout;
    const onError = () => {
        if (retries >= max)
            return;
        retries++;
        if (timeout)
            clearTimeout(timeout);
        // BUGFIX: keep the timer handle so a later success (or another error) can cancel
        // the pending retry — previously the handle was discarded, so the clearTimeout
        // calls above/below never cancelled anything.
        timeout = setTimeout(() => res.reload(), retries <= 0 ? 0 : backoff * Math.pow(2, retries - 1));
    };
    const onSuccess = () => {
        if (timeout)
            clearTimeout(timeout);
        retries = 0;
    };
    // Watch the resource status and drive the retry state machine.
    const ref = effect(() => {
        switch (res.status()) {
            case ResourceStatus.Error:
                return onError();
            case ResourceStatus.Resolved:
                return onSuccess();
        }
    });
    return {
        ...res,
        destroy: () => {
            ref.destroy(); // cleanup on manual destroy
            res.destroy();
        },
    };
}
|
|
771
|
+
|
|
772
|
+
// Serializes request params into a query string. `HttpParams` instances
// already know how to stringify themselves; plain objects are encoded per key,
// with array values joined by commas. Keys are emitted as-is (not encoded).
function normalizeParams(params) {
    if (params instanceof HttpParams)
        return params.toString();
    const pairs = [];
    for (const [key, value] of entries(params)) {
        const encoded = Array.isArray(value)
            ? value.map(encodeURIComponent).join(',')
            : encodeURIComponent(value.toString());
        pairs.push(`${key}=${encoded}`);
    }
    return pairs.join('&');
}
|
|
788
|
+
// Default cache-key hash for a request: the URL, with the serialized params
// appended as a query string when any are present.
function urlWithParams(req) {
    const { url, params } = req;
    return params ? `${url}?${normalizeParams(params)}` : url;
}
|
|
793
|
+
|
|
794
|
+
/**
 * Creates a query resource: an `httpResource` wrapped with (all opt-in via
 * `options`) response caching, a circuit breaker, interval refresh, retry with
 * backoff, value persistence across loads, an `onError` callback, writable
 * `set`/`update` that write through to the cache, and a `prefetch` helper.
 *
 * @param request reactive function producing the `HttpResourceRequest`
 *                (returning `undefined` disables the resource)
 * @param options optional config: `cache`, `circuitBreaker`, `refresh`,
 *                `retry`, `keepPrevious`, `equal`, `parse`, `onError`,
 *                `injector`, plus the underlying `httpResource` options
 * @returns an extended resource ref with `value`, `set`, `update`, `disabled`,
 *          `reload`, `destroy` and `prefetch`
 */
function queryResource(request, options) {
    const cache = injectQueryCache(options?.injector);
    // Resolve DI either from the provided injector or the current context.
    const destroyRef = options?.injector
        ? options.injector.get(DestroyRef)
        : inject(DestroyRef);
    // `circuitBreaker: true` selects the breaker's defaults; an object passes
    // through; otherwise the breaker is created disabled (`false`).
    const cb = createCircuitBreaker(options?.circuitBreaker === true
        ? undefined
        : (options?.circuitBreaker ?? false));
    // The request, gated by the circuit breaker and de-duplicated via custom
    // request equality so identical requests don't re-trigger the resource.
    // NOTE(review): `isClosed()` gates requests OFF here — naming appears
    // inverted vs. the classic convention where a "closed" circuit lets
    // traffic flow; confirm against createCircuitBreaker.
    const stableRequest = computed(() => {
        if (cb.isClosed())
            return undefined;
        return request();
    }, {
        equal: createEqualRequest(options?.equal),
    });
    // Cache knobs only apply when `cache` is an options object; otherwise the
    // default hash (url + params) and zero stale time are used.
    const hashFn = typeof options?.cache === 'object'
        ? (options.cache.hash ?? urlWithParams)
        : urlWithParams;
    const staleTime = typeof options?.cache === 'object' ? options.cache.staleTime : 0;
    const ttl = typeof options?.cache === 'object' ? options.cache.ttl : undefined;
    // Cache key of the current request, or null while disabled.
    const cacheKey = computed(() => {
        const r = stableRequest();
        if (!r)
            return null;
        return hashFn(r);
    });
    // When caching, tag each outgoing request's HttpContext so the cache
    // interceptor can key/expire it; otherwise use the request unchanged.
    const cachedRequest = options?.cache
        ? computed(() => {
            const r = stableRequest();
            if (!r)
                return r;
            return {
                ...r,
                context: setCacheContext(r.context, {
                    staleTime,
                    ttl,
                    key: cacheKey() ?? hashFn(r),
                }),
            };
        })
        : stableRequest;
    let resource = httpResource(cachedRequest, {
        ...options,
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        parse: options?.parse, // Not my favorite thing to do, but here it is completely safe.
    });
    // Full HttpResponse for this key, read reactively from the cache.
    const cachedEvent = cache.get(cacheKey);
    const parse = options?.parse ?? ((val) => val);
    // Parsed body of the cached response, or undefined when nothing cached.
    const actualCacheValue = computed(() => {
        const ce = cachedEvent();
        if (!ce || !(ce instanceof HttpResponse))
            return;
        return parse(ce.body);
    });
    // Retains the last cache value after it is invalidated, for the lifetime
    // of the resource (avoids flashing to undefined on invalidation).
    const cachedValue = linkedSignal({
        source: () => actualCacheValue(),
        computation: (source, prev) => {
            if (!source && prev)
                return prev.value;
            return source;
        },
    });
    // With caching on, reads prefer the cached value; writes still go through
    // the underlying resource's value signal.
    const value = options?.cache
        ? toWritable(computed(() => {
            return cachedValue() ?? resource.value();
        }), resource.value.set, resource.value.update)
        : resource.value;
    // Decorator chain: interval refresh -> retry-on-error -> value persistence.
    resource = refresh(resource, destroyRef, options?.refresh);
    resource = retryOnError(resource, options?.retry);
    // NOTE(review): persistResourceValues is defined earlier in this file;
    // presumably it keeps previous value/headers/statusCode while loading.
    resource = persistResourceValues({ ...resource, value }, computed(() => !!cachedValue()), options?.keepPrevious, options?.equal);
    const onError = options?.onError; // Put in own variable to ensure value remains even if options are somehow mutated in-line
    if (onError) {
        // Invoke the callback whenever the resource surfaces an error.
        const onErrorRef = effect(() => {
            const err = resource.error();
            if (err)
                onError(err);
        });
        // cleanup on manual destroy, I'm comfortable setting these props in-line as we have yet to 'release' the object out of this lexical scope
        const destroyRest = resource.destroy;
        resource.destroy = () => {
            onErrorRef.destroy();
            destroyRest();
        };
    }
    // Iterate circuit-breaker state; an effect because a computed would cause
    // a circular dependency (resource -> cb -> resource).
    const cbEffectRef = effect(() => {
        const status = resource.status();
        if (status === ResourceStatus.Error)
            cb.fail();
        else if (status === ResourceStatus.Resolved)
            cb.success();
    });
    // Manual write: update the local value and, when caching, store a
    // synthetic 200 response so other consumers of the key see it too.
    const set = (value) => {
        resource.set(value);
        const k = untracked(cacheKey);
        if (options?.cache && k)
            cache.store(k, new HttpResponse({
                body: value,
                status: 200,
                statusText: 'OK',
            }));
    };
    const update = (updater) => {
        set(updater(untracked(resource.value)));
    };
    const client = options?.injector
        ? options.injector.get(HttpClient)
        : inject(HttpClient);
    return {
        ...resource,
        value,
        set,
        update,
        // Disabled when the breaker blocks requests or no request is produced.
        disabled: computed(() => cb.isClosed() || stableRequest() === undefined),
        reload: () => {
            cb.halfOpen(); // open the circuit for manual reload
            return resource.reload();
        },
        destroy: () => {
            cbEffectRef.destroy();
            cb.destroy();
            resource.destroy();
        },
        // Fire-and-forget warm-up of the cache; skipped without caching or on
        // a slow connection. Errors are only logged in dev mode.
        prefetch: async (partial) => {
            if (!options?.cache || hasSlowConnection())
                return Promise.resolve();
            const request = untracked(cachedRequest);
            if (!request)
                return Promise.resolve();
            const prefetchRequest = {
                ...request,
                ...partial,
            };
            try {
                await firstValueFrom(client.request(prefetchRequest.method ?? 'GET', prefetchRequest.url, {
                    ...prefetchRequest,
                    headers: prefetchRequest.headers,
                    observe: 'response',
                }));
                return;
            }
            catch (err) {
                if (isDevMode())
                    console.error('Prefetch failed: ', err);
                return;
            }
        },
    };
}
|
|
945
|
+
|
|
946
|
+
/**
 * Creates a resource for performing mutations (e.g., POST, PUT, PATCH, DELETE requests).
 * Unlike `queryResource`, `mutationResource` is designed for one-off operations that change data.
 * It does *not* cache responses and does not provide a `value` signal. Instead, it focuses on
 * managing the mutation lifecycle (pending, error, success) and provides callbacks for handling
 * these states.
 *
 * @param request A function that returns the base `HttpResourceRequest` to be made. This
 *                function is called reactively. Unlike `queryResource`, the `body` property
 *                of the request is provided when `mutate` is called, *not* here. If the
 *                function returns `undefined`, the mutation is considered "disabled." All properties,
 *                except the body, can be set here.
 * @param options Configuration options for the mutation resource. This includes callbacks
 *                for `onMutate`, `onError`, `onSuccess`, and `onSettled`.
 * @typeParam TResult - The type of the expected result from the mutation.
 * @typeParam TRaw - The raw response type from the HTTP request (defaults to TResult).
 * @typeParam TCTX - The type of the context value returned by `onMutate`.
 * @returns A `MutationResourceRef` instance, which provides methods for triggering the mutation
 *          and observing its status.
 */
function mutationResource(request, options = {}) {
    const equal = createEqualRequest(options?.equal);
    // The reactive "base" request (url, headers, method, ... — never the body).
    const baseRequest = computed(() => request(), {
        equal,
    });
    // The pending mutation request set by `mutate()`; null means idle.
    const nextRequest = signal(null, {
        // Treat two nulls as equal, null vs non-null as different, otherwise
        // fall back to request equality.
        equal: (a, b) => {
            if (!a && !b)
                return true;
            if (!a || !b)
                return false;
            return equal(a, b);
        },
    });
    // Effective request: base merged with the mutate() payload. Undefined
    // (disabled) while idle or while no url can be resolved.
    const req = computed(() => {
        const nr = nextRequest();
        if (!nr)
            return;
        const base = baseRequest();
        const url = base?.url ?? nr.url;
        if (!url)
            return;
        return {
            ...base,
            ...nr,
            url,
        };
    });
    const { onMutate, onError, onSuccess, onSettled, ...rest } = options;
    // Reuse queryResource for the transport; lifecycle callbacks are handled
    // below instead of being forwarded.
    const resource = queryResource(req, {
        ...rest,
        defaultValue: null, // doesn't matter since .value is not accessible
    });
    // Context returned by onMutate, threaded into onError/onSuccess/onSettled.
    let ctx = undefined;
    const destroyRef = options.injector
        ? options.injector.get(DestroyRef)
        : inject(DestroyRef);
    const error$ = toObservable(resource.error);
    const value$ = toObservable(resource.value);
    // Project (status, error, value) into a settled Error/Resolved event, or
    // null while pending/idle (filtered out).
    // NOTE(review): combineLatestWith re-emits when error/value change even if
    // status does not — the filter keeps only settled states; confirm no
    // duplicate callback invocations are possible for a single mutation.
    const statusSub = toObservable(resource.status)
        .pipe(combineLatestWith(error$, value$), map(([status, error, value]) => {
        if (status === ResourceStatus.Error && error) {
            return {
                status: ResourceStatus.Error,
                error,
            };
        }
        if (status === ResourceStatus.Resolved) {
            return {
                status: ResourceStatus.Resolved,
                value,
            };
        }
        return null;
    }), filter((v) => v !== null), takeUntilDestroyed(destroyRef))
        .subscribe((result) => {
        // Dispatch lifecycle callbacks, then reset to idle.
        if (result.status === ResourceStatus.Error)
            onError?.(result.error, ctx);
        else
            onSuccess?.(result.value, ctx);
        onSettled?.(ctx);
        ctx = undefined;
        nextRequest.set(null);
    });
    return {
        ...resource,
        destroy: () => {
            statusSub.unsubscribe();
            resource.destroy();
        },
        // Trigger the mutation: capture the onMutate context, then arm the
        // request signal so the underlying resource fires.
        mutate: (value) => {
            ctx = onMutate?.(value.body);
            nextRequest.set(value);
        },
        // Exposes the in-flight request signal (null while idle).
        current: nextRequest,
    };
}
|
|
1043
|
+
|
|
1044
|
+
/**
|
|
1045
|
+
* Generated bundle index. Do not edit.
|
|
1046
|
+
*/
|
|
1047
|
+
|
|
1048
|
+
export { Cache, createCacheInterceptor, createCircuitBreaker, createDedupeRequestsInterceptor, injectQueryCache, mutationResource, noDedupe, provideQueryCache, queryResource };
|
|
1049
|
+
//# sourceMappingURL=mmstack-resource.mjs.map
|