@tanstack/query-core 5.85.9 → 5.86.0
This diff compares the contents of the two published package versions as they appear in their public registries and is provided for informational purposes only.
- package/build/legacy/streamedQuery.cjs +7 -8
- package/build/legacy/streamedQuery.cjs.map +1 -1
- package/build/legacy/streamedQuery.d.cts +17 -9
- package/build/legacy/streamedQuery.d.ts +17 -9
- package/build/legacy/streamedQuery.js +7 -8
- package/build/legacy/streamedQuery.js.map +1 -1
- package/build/modern/streamedQuery.cjs +7 -8
- package/build/modern/streamedQuery.cjs.map +1 -1
- package/build/modern/streamedQuery.d.cts +17 -9
- package/build/modern/streamedQuery.d.ts +17 -9
- package/build/modern/streamedQuery.js +7 -8
- package/build/modern/streamedQuery.js.map +1 -1
- package/package.json +1 -1
- package/src/streamedQuery.ts +47 -23
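
The substantive change is in `package/src/streamedQuery.ts`; the `build/*` outputs and source maps mirror it. `streamedQuery` now takes a `streamFn` option instead of `queryFn`, the `maxChunks` option is removed, and two new optional settings, `reducer` and `initialValue`, let chunks be folded into an arbitrary `TData` instead of always accumulating into an array. Below is a minimal sketch of the default (array-accumulating) behaviour under the new option name; the fake stream, query key, and `main` wrapper are invented for illustration and are not part of the package.

import { QueryClient, streamedQuery } from '@tanstack/query-core'

// Hypothetical stream that yields three string chunks.
async function* fakeStream(): AsyncIterable<string> {
  yield 'hello'
  yield 'streamed'
  yield 'world'
}

const client = new QueryClient()

async function main() {
  // With no `reducer`/`initialValue`, data is still an Array of all chunks
  // received so far, as in 5.85.x (minus the removed `maxChunks` cap).
  const data = await client.fetchQuery({
    queryKey: ['tokens'],
    queryFn: streamedQuery({
      streamFn: () => fakeStream(),
      refetchMode: 'reset',
    }),
  })
  console.log(data) // ['hello', 'streamed', 'world']
}

main()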
package/build/legacy/streamedQuery.cjs
CHANGED

@@ -25,9 +25,10 @@ __export(streamedQuery_exports, {
 module.exports = __toCommonJS(streamedQuery_exports);
 var import_utils = require("./utils.cjs");
 function streamedQuery({
-
+  streamFn,
   refetchMode = "reset",
-
+  reducer = (items, chunk) => (0, import_utils.addToEnd)(items, chunk),
+  initialValue = []
 }) {
   return async (context) => {
     const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -40,8 +41,8 @@ function streamedQuery({
         fetchStatus: "fetching"
       });
     }
-    let result =
-    const stream = await
+    let result = initialValue;
+    const stream = await streamFn(context);
     for await (const chunk of stream) {
       if (context.signal.aborted) {
         break;
@@ -49,12 +50,10 @@ function streamedQuery({
       if (!isRefetch || refetchMode !== "replace") {
        context.client.setQueryData(
          context.queryKey,
-          (prev
-          return (0, import_utils.addToEnd)(prev, chunk, maxChunks);
-          }
+          (prev) => reducer(prev === void 0 ? initialValue : prev, chunk)
         );
       }
-      result = (
+      result = reducer(result, chunk);
     }
     if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
       context.client.setQueryData(context.queryKey, result);

package/build/legacy/streamedQuery.cjs.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param
+
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\ntype BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {\n streamFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}\n\ntype SimpleStreamedQueryParams<\n TQueryFnData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer?: never\n initialValue?: never\n}\n\ntype ReducibleStreamedQueryParams<\n TQueryFnData,\n TData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer: (acc: TData, chunk: TQueryFnData) => TData\n initialValue: TData\n}\n\ntype StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> =\n | SimpleStreamedQueryParams<TQueryFnData, TQueryKey>\n | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param reducer - A function to reduce the streamed chunks into the final data.\n * Defaults to a function that appends chunks to the end of the array.\n * @param initialValue - Initial value to be used while the first chunk is being fetched.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TData = Array<TQueryFnData>,\n TQueryKey extends QueryKey = QueryKey,\n>({\n streamFn,\n refetchMode = 'reset',\n reducer = (items, chunk) =>\n addToEnd(items as Array<TQueryFnData>, chunk) as TData,\n initialValue = [] as TData,\n}: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<\n TData,\n TQueryKey\n> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result = initialValue\n\n const stream = await streamFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<TData>(context.queryKey, (prev) =>\n reducer(prev === undefined ? 
initialValue : prev, chunk),\n )\n }\n result = reducer(result, chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<TData>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AA6ClB,SAAS,cAId;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd,UAAU,CAAC,OAAO,cAChB,uBAAS,OAA8B,KAAK;AAAA,EAC9C,eAAe,CAAC;AAClB,GAGE;AACA,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAClD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAAS;AAEb,UAAM,SAAS,MAAM,SAAS,OAAO;AAErC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UAAoB,QAAQ;AAAA,UAAU,CAAC,SACpD,QAAQ,SAAS,SAAY,eAAe,MAAM,KAAK;AAAA,QACzD;AAAA,MACF;AACA,eAAS,QAAQ,QAAQ,KAAK;AAAA,IAChC;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAoB,QAAQ,UAAU,MAAM;AAAA,IAC7D;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}

package/build/legacy/streamedQuery.d.cts
CHANGED

@@ -2,6 +2,19 @@ import { I as QueryKey, a1 as QueryFunctionContext, Y as QueryFunction } from '.
 import './removable.cjs';
 import './subscribable.cjs';
 
+type BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {
+    streamFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
+    refetchMode?: 'append' | 'reset' | 'replace';
+};
+type SimpleStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer?: never;
+    initialValue?: never;
+};
+type ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer: (acc: TData, chunk: TQueryFnData) => TData;
+    initialValue: TData;
+};
+type StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = SimpleStreamedQueryParams<TQueryFnData, TQueryKey> | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>;
 /**
  * This is a helper function to create a query function that streams data from an AsyncIterable.
  * Data will be an Array of all the chunks received.
@@ -12,15 +25,10 @@ import './subscribable.cjs';
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
  * Set to `'replace'` to write all data to the cache once the stream ends.
- * @param
- * Defaults to
- *
- * If the number of chunks exceeds this number, the oldest chunk will be removed.
+ * @param reducer - A function to reduce the streamed chunks into the final data.
+ * Defaults to a function that appends chunks to the end of the array.
+ * @param initialValue - Initial value to be used while the first chunk is being fetched.
  */
-declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({
-    queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
-    refetchMode?: 'append' | 'reset' | 'replace';
-    maxChunks?: number;
-}): QueryFunction<Array<TQueryFnData>, TQueryKey>;
+declare function streamedQuery<TQueryFnData = unknown, TData = Array<TQueryFnData>, TQueryKey extends QueryKey = QueryKey>({ streamFn, refetchMode, reducer, initialValue, }: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<TData, TQueryKey>;
 
 export { streamedQuery };
package/build/legacy/streamedQuery.d.ts
CHANGED

@@ -2,6 +2,19 @@ import { I as QueryKey, a1 as QueryFunctionContext, Y as QueryFunction } from '.
 import './removable.js';
 import './subscribable.js';
 
+type BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {
+    streamFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
+    refetchMode?: 'append' | 'reset' | 'replace';
+};
+type SimpleStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer?: never;
+    initialValue?: never;
+};
+type ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer: (acc: TData, chunk: TQueryFnData) => TData;
+    initialValue: TData;
+};
+type StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = SimpleStreamedQueryParams<TQueryFnData, TQueryKey> | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>;
 /**
  * This is a helper function to create a query function that streams data from an AsyncIterable.
  * Data will be an Array of all the chunks received.
@@ -12,15 +25,10 @@ import './subscribable.js';
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
  * Set to `'replace'` to write all data to the cache once the stream ends.
- * @param
- * Defaults to
- *
- * If the number of chunks exceeds this number, the oldest chunk will be removed.
+ * @param reducer - A function to reduce the streamed chunks into the final data.
+ * Defaults to a function that appends chunks to the end of the array.
+ * @param initialValue - Initial value to be used while the first chunk is being fetched.
  */
-declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({
-    queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
-    refetchMode?: 'append' | 'reset' | 'replace';
-    maxChunks?: number;
-}): QueryFunction<Array<TQueryFnData>, TQueryKey>;
+declare function streamedQuery<TQueryFnData = unknown, TData = Array<TQueryFnData>, TQueryKey extends QueryKey = QueryKey>({ streamFn, refetchMode, reducer, initialValue, }: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<TData, TQueryKey>;
 
 export { streamedQuery };
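
The rebuilt `.d.cts`/`.d.ts` declarations above split the options into a `Simple…`/`Reducible…` union: the `reducer?: never; initialValue?: never` arm is what forces `reducer` and `initialValue` to be supplied together or not at all. A stripped-down sketch of that pattern follows (standalone names, not the package's actual declarations):

// Standalone illustration of the `never`-based either/or option pattern.
type Simple = {
  streamFn: () => AsyncIterable<number>
  reducer?: never
  initialValue?: never
}
type Reducible<TData> = {
  streamFn: () => AsyncIterable<number>
  reducer: (acc: TData, chunk: number) => TData
  initialValue: TData
}
type Params<TData> = Simple | Reducible<TData>

declare function demo<TData = Array<number>>(params: Params<TData>): TData

async function* nums() {
  yield 1
  yield 2
}

demo({ streamFn: nums }) // ok: plain array mode, neither option supplied
demo({ streamFn: nums, reducer: (acc: number, n: number) => acc + n, initialValue: 0 }) // ok: both supplied
// demo({ streamFn: nums, reducer: (acc: number, n: number) => acc + n }) // error: initialValue required with reducer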
package/build/legacy/streamedQuery.js
CHANGED

@@ -3,9 +3,10 @@ import "./chunk-PXG64RU4.js";
 // src/streamedQuery.ts
 import { addToEnd } from "./utils.js";
 function streamedQuery({
-
+  streamFn,
   refetchMode = "reset",
-
+  reducer = (items, chunk) => addToEnd(items, chunk),
+  initialValue = []
 }) {
   return async (context) => {
     const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -18,8 +19,8 @@ function streamedQuery({
         fetchStatus: "fetching"
       });
     }
-    let result =
-    const stream = await
+    let result = initialValue;
+    const stream = await streamFn(context);
     for await (const chunk of stream) {
       if (context.signal.aborted) {
         break;
@@ -27,12 +28,10 @@ function streamedQuery({
       if (!isRefetch || refetchMode !== "replace") {
        context.client.setQueryData(
          context.queryKey,
-          (prev
-          return addToEnd(prev, chunk, maxChunks);
-          }
+          (prev) => reducer(prev === void 0 ? initialValue : prev, chunk)
         );
       }
-      result =
+      result = reducer(result, chunk);
     }
     if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
       context.client.setQueryData(context.queryKey, result);

package/build/legacy/streamedQuery.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param
+
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\ntype BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {\n streamFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}\n\ntype SimpleStreamedQueryParams<\n TQueryFnData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer?: never\n initialValue?: never\n}\n\ntype ReducibleStreamedQueryParams<\n TQueryFnData,\n TData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer: (acc: TData, chunk: TQueryFnData) => TData\n initialValue: TData\n}\n\ntype StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> =\n | SimpleStreamedQueryParams<TQueryFnData, TQueryKey>\n | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param reducer - A function to reduce the streamed chunks into the final data.\n * Defaults to a function that appends chunks to the end of the array.\n * @param initialValue - Initial value to be used while the first chunk is being fetched.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TData = Array<TQueryFnData>,\n TQueryKey extends QueryKey = QueryKey,\n>({\n streamFn,\n refetchMode = 'reset',\n reducer = (items, chunk) =>\n addToEnd(items as Array<TQueryFnData>, chunk) as TData,\n initialValue = [] as TData,\n}: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<\n TData,\n TQueryKey\n> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result = initialValue\n\n const stream = await streamFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<TData>(context.queryKey, (prev) =>\n reducer(prev === undefined ? 
initialValue : prev, chunk),\n )\n }\n result = reducer(result, chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<TData>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;AAAA,SAAS,gBAAgB;AA6ClB,SAAS,cAId;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd,UAAU,CAAC,OAAO,UAChB,SAAS,OAA8B,KAAK;AAAA,EAC9C,eAAe,CAAC;AAClB,GAGE;AACA,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAClD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAAS;AAEb,UAAM,SAAS,MAAM,SAAS,OAAO;AAErC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UAAoB,QAAQ;AAAA,UAAU,CAAC,SACpD,QAAQ,SAAS,SAAY,eAAe,MAAM,KAAK;AAAA,QACzD;AAAA,MACF;AACA,eAAS,QAAQ,QAAQ,KAAK;AAAA,IAChC;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAoB,QAAQ,UAAU,MAAM;AAAA,IAC7D;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}

package/build/modern/streamedQuery.cjs
CHANGED

@@ -25,9 +25,10 @@ __export(streamedQuery_exports, {
 module.exports = __toCommonJS(streamedQuery_exports);
 var import_utils = require("./utils.cjs");
 function streamedQuery({
-
+  streamFn,
   refetchMode = "reset",
-
+  reducer = (items, chunk) => (0, import_utils.addToEnd)(items, chunk),
+  initialValue = []
 }) {
   return async (context) => {
     const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -40,8 +41,8 @@ function streamedQuery({
         fetchStatus: "fetching"
      });
     }
-    let result =
-    const stream = await
+    let result = initialValue;
+    const stream = await streamFn(context);
     for await (const chunk of stream) {
       if (context.signal.aborted) {
         break;
@@ -49,12 +50,10 @@ function streamedQuery({
       if (!isRefetch || refetchMode !== "replace") {
        context.client.setQueryData(
          context.queryKey,
-          (prev
-          return (0, import_utils.addToEnd)(prev, chunk, maxChunks);
-          }
+          (prev) => reducer(prev === void 0 ? initialValue : prev, chunk)
         );
       }
-      result = (
+      result = reducer(result, chunk);
     }
     if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
       context.client.setQueryData(context.queryKey, result);

package/build/modern/streamedQuery.cjs.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param
+
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\ntype BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {\n streamFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}\n\ntype SimpleStreamedQueryParams<\n TQueryFnData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer?: never\n initialValue?: never\n}\n\ntype ReducibleStreamedQueryParams<\n TQueryFnData,\n TData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer: (acc: TData, chunk: TQueryFnData) => TData\n initialValue: TData\n}\n\ntype StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> =\n | SimpleStreamedQueryParams<TQueryFnData, TQueryKey>\n | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param reducer - A function to reduce the streamed chunks into the final data.\n * Defaults to a function that appends chunks to the end of the array.\n * @param initialValue - Initial value to be used while the first chunk is being fetched.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TData = Array<TQueryFnData>,\n TQueryKey extends QueryKey = QueryKey,\n>({\n streamFn,\n refetchMode = 'reset',\n reducer = (items, chunk) =>\n addToEnd(items as Array<TQueryFnData>, chunk) as TData,\n initialValue = [] as TData,\n}: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<\n TData,\n TQueryKey\n> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result = initialValue\n\n const stream = await streamFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<TData>(context.queryKey, (prev) =>\n reducer(prev === undefined ? 
initialValue : prev, chunk),\n )\n }\n result = reducer(result, chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<TData>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AA6ClB,SAAS,cAId;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd,UAAU,CAAC,OAAO,cAChB,uBAAS,OAA8B,KAAK;AAAA,EAC9C,eAAe,CAAC;AAClB,GAGE;AACA,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAClD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAAS;AAEb,UAAM,SAAS,MAAM,SAAS,OAAO;AAErC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UAAoB,QAAQ;AAAA,UAAU,CAAC,SACpD,QAAQ,SAAS,SAAY,eAAe,MAAM,KAAK;AAAA,QACzD;AAAA,MACF;AACA,eAAS,QAAQ,QAAQ,KAAK;AAAA,IAChC;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAoB,QAAQ,UAAU,MAAM;AAAA,IAC7D;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}

package/build/modern/streamedQuery.d.cts
CHANGED

@@ -2,6 +2,19 @@ import { I as QueryKey, a1 as QueryFunctionContext, Y as QueryFunction } from '.
 import './removable.cjs';
 import './subscribable.cjs';
 
+type BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {
+    streamFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
+    refetchMode?: 'append' | 'reset' | 'replace';
+};
+type SimpleStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer?: never;
+    initialValue?: never;
+};
+type ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer: (acc: TData, chunk: TQueryFnData) => TData;
+    initialValue: TData;
+};
+type StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = SimpleStreamedQueryParams<TQueryFnData, TQueryKey> | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>;
 /**
  * This is a helper function to create a query function that streams data from an AsyncIterable.
  * Data will be an Array of all the chunks received.
@@ -12,15 +25,10 @@ import './subscribable.cjs';
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
  * Set to `'replace'` to write all data to the cache once the stream ends.
- * @param
- * Defaults to
- *
- * If the number of chunks exceeds this number, the oldest chunk will be removed.
+ * @param reducer - A function to reduce the streamed chunks into the final data.
+ * Defaults to a function that appends chunks to the end of the array.
+ * @param initialValue - Initial value to be used while the first chunk is being fetched.
  */
-declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({
-    queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
-    refetchMode?: 'append' | 'reset' | 'replace';
-    maxChunks?: number;
-}): QueryFunction<Array<TQueryFnData>, TQueryKey>;
+declare function streamedQuery<TQueryFnData = unknown, TData = Array<TQueryFnData>, TQueryKey extends QueryKey = QueryKey>({ streamFn, refetchMode, reducer, initialValue, }: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<TData, TQueryKey>;
 
 export { streamedQuery };
package/build/modern/streamedQuery.d.ts
CHANGED

@@ -2,6 +2,19 @@ import { I as QueryKey, a1 as QueryFunctionContext, Y as QueryFunction } from '.
 import './removable.js';
 import './subscribable.js';
 
+type BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {
+    streamFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
+    refetchMode?: 'append' | 'reset' | 'replace';
+};
+type SimpleStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer?: never;
+    initialValue?: never;
+};
+type ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+    reducer: (acc: TData, chunk: TQueryFnData) => TData;
+    initialValue: TData;
+};
+type StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> = SimpleStreamedQueryParams<TQueryFnData, TQueryKey> | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>;
 /**
  * This is a helper function to create a query function that streams data from an AsyncIterable.
  * Data will be an Array of all the chunks received.
@@ -12,15 +25,10 @@ import './subscribable.js';
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
  * Set to `'replace'` to write all data to the cache once the stream ends.
- * @param
- * Defaults to
- *
- * If the number of chunks exceeds this number, the oldest chunk will be removed.
+ * @param reducer - A function to reduce the streamed chunks into the final data.
+ * Defaults to a function that appends chunks to the end of the array.
+ * @param initialValue - Initial value to be used while the first chunk is being fetched.
  */
-declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({
-    queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
-    refetchMode?: 'append' | 'reset' | 'replace';
-    maxChunks?: number;
-}): QueryFunction<Array<TQueryFnData>, TQueryKey>;
+declare function streamedQuery<TQueryFnData = unknown, TData = Array<TQueryFnData>, TQueryKey extends QueryKey = QueryKey>({ streamFn, refetchMode, reducer, initialValue, }: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<TData, TQueryKey>;
 
 export { streamedQuery };
package/build/modern/streamedQuery.js
CHANGED

@@ -1,9 +1,10 @@
 // src/streamedQuery.ts
 import { addToEnd } from "./utils.js";
 function streamedQuery({
-
+  streamFn,
   refetchMode = "reset",
-
+  reducer = (items, chunk) => addToEnd(items, chunk),
+  initialValue = []
 }) {
   return async (context) => {
     const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -16,8 +17,8 @@ function streamedQuery({
         fetchStatus: "fetching"
       });
     }
-    let result =
-    const stream = await
+    let result = initialValue;
+    const stream = await streamFn(context);
     for await (const chunk of stream) {
       if (context.signal.aborted) {
         break;
@@ -25,12 +26,10 @@ function streamedQuery({
       if (!isRefetch || refetchMode !== "replace") {
        context.client.setQueryData(
          context.queryKey,
-          (prev
-          return addToEnd(prev, chunk, maxChunks);
-          }
+          (prev) => reducer(prev === void 0 ? initialValue : prev, chunk)
         );
       }
-      result =
+      result = reducer(result, chunk);
     }
     if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
       context.client.setQueryData(context.queryKey, result);

package/build/modern/streamedQuery.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param
+
{"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\ntype BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {\n streamFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}\n\ntype SimpleStreamedQueryParams<\n TQueryFnData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer?: never\n initialValue?: never\n}\n\ntype ReducibleStreamedQueryParams<\n TQueryFnData,\n TData,\n TQueryKey extends QueryKey,\n> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {\n reducer: (acc: TData, chunk: TQueryFnData) => TData\n initialValue: TData\n}\n\ntype StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> =\n | SimpleStreamedQueryParams<TQueryFnData, TQueryKey>\n | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param reducer - A function to reduce the streamed chunks into the final data.\n * Defaults to a function that appends chunks to the end of the array.\n * @param initialValue - Initial value to be used while the first chunk is being fetched.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TData = Array<TQueryFnData>,\n TQueryKey extends QueryKey = QueryKey,\n>({\n streamFn,\n refetchMode = 'reset',\n reducer = (items, chunk) =>\n addToEnd(items as Array<TQueryFnData>, chunk) as TData,\n initialValue = [] as TData,\n}: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<\n TData,\n TQueryKey\n> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result = initialValue\n\n const stream = await streamFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<TData>(context.queryKey, (prev) =>\n reducer(prev === undefined ? 
initialValue : prev, chunk),\n )\n }\n result = reducer(result, chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<TData>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AA6ClB,SAAS,cAId;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd,UAAU,CAAC,OAAO,UAChB,SAAS,OAA8B,KAAK;AAAA,EAC9C,eAAe,CAAC;AAClB,GAGE;AACA,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAClD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAAS;AAEb,UAAM,SAAS,MAAM,SAAS,OAAO;AAErC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UAAoB,QAAQ;AAAA,UAAU,CAAC,SACpD,QAAQ,SAAS,SAAY,eAAe,MAAM,KAAK;AAAA,QACzD;AAAA,MACF;AACA,eAAS,QAAQ,QAAQ,KAAK;AAAA,IAChC;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAoB,QAAQ,UAAU,MAAM;AAAA,IAC7D;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}

package/package.json
CHANGED

package/src/streamedQuery.ts
CHANGED

@@ -1,6 +1,34 @@
 import { addToEnd } from './utils'
 import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'
 
+type BaseStreamedQueryParams<TQueryFnData, TQueryKey extends QueryKey> = {
+  streamFn: (
+    context: QueryFunctionContext<TQueryKey>,
+  ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>
+  refetchMode?: 'append' | 'reset' | 'replace'
+}
+
+type SimpleStreamedQueryParams<
+  TQueryFnData,
+  TQueryKey extends QueryKey,
+> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+  reducer?: never
+  initialValue?: never
+}
+
+type ReducibleStreamedQueryParams<
+  TQueryFnData,
+  TData,
+  TQueryKey extends QueryKey,
+> = BaseStreamedQueryParams<TQueryFnData, TQueryKey> & {
+  reducer: (acc: TData, chunk: TQueryFnData) => TData
+  initialValue: TData
+}
+
+type StreamedQueryParams<TQueryFnData, TData, TQueryKey extends QueryKey> =
+  | SimpleStreamedQueryParams<TQueryFnData, TQueryKey>
+  | ReducibleStreamedQueryParams<TQueryFnData, TData, TQueryKey>
+
 /**
  * This is a helper function to create a query function that streams data from an AsyncIterable.
  * Data will be an Array of all the chunks received.
@@ -11,31 +39,29 @@ import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
  * Set to `'replace'` to write all data to the cache once the stream ends.
- * @param
- * Defaults to
- *
- * If the number of chunks exceeds this number, the oldest chunk will be removed.
+ * @param reducer - A function to reduce the streamed chunks into the final data.
+ * Defaults to a function that appends chunks to the end of the array.
+ * @param initialValue - Initial value to be used while the first chunk is being fetched.
  */
 export function streamedQuery<
   TQueryFnData = unknown,
+  TData = Array<TQueryFnData>,
   TQueryKey extends QueryKey = QueryKey,
 >({
-
+  streamFn,
   refetchMode = 'reset',
-
-
-
-
-
-
-
-}): QueryFunction<Array<TQueryFnData>, TQueryKey> {
+  reducer = (items, chunk) =>
+    addToEnd(items as Array<TQueryFnData>, chunk) as TData,
+  initialValue = [] as TData,
+}: StreamedQueryParams<TQueryFnData, TData, TQueryKey>): QueryFunction<
+  TData,
+  TQueryKey
+> {
   return async (context) => {
     const query = context.client
       .getQueryCache()
       .find({ queryKey: context.queryKey, exact: true })
     const isRefetch = !!query && query.state.data !== undefined
-
     if (isRefetch && refetchMode === 'reset') {
       query.setState({
         status: 'pending',
@@ -45,8 +71,9 @@ export function streamedQuery<
       })
     }
 
-    let result
-
+    let result = initialValue
+
+    const stream = await streamFn(context)
 
     for await (const chunk of stream) {
       if (context.signal.aborted) {
@@ -55,19 +82,16 @@ export function streamedQuery<
 
       // don't append to the cache directly when replace-refetching
       if (!isRefetch || refetchMode !== 'replace') {
-        context.client.setQueryData<
-
-        (prev = []) => {
-            return addToEnd(prev, chunk, maxChunks)
-          },
+        context.client.setQueryData<TData>(context.queryKey, (prev) =>
+          reducer(prev === undefined ? initialValue : prev, chunk),
         )
       }
-      result =
+      result = reducer(result, chunk)
     }
 
     // finalize result: replace-refetching needs to write to the cache
     if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {
-      context.client.setQueryData<
+      context.client.setQueryData<TData>(context.queryKey, result)
     }
 
     return context.client.getQueryData(context.queryKey)!