@tanstack/query-core 5.76.2 → 5.77.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -119,6 +119,7 @@ function hydrate(client, dehydratedState, options) {
  const data = rawData === void 0 ? rawData : deserializeData(rawData);
  let query = queryCache.get(queryHash);
  const existingQueryIsPending = (query == null ? void 0 : query.state.status) === "pending";
+ const existingQueryIsFetching = (query == null ? void 0 : query.state.fetchStatus) === "fetching";
  if (query) {
  const hasNewerSyncData = syncData && // We only need this undefined check to handle older dehydration
  // payloads that might not have dehydratedAt
@@ -150,7 +151,7 @@ function hydrate(client, dehydratedState, options) {
  }
  );
  }
- if (promise && !existingQueryIsPending && // Only hydrate if dehydration is newer than any existing data,
+ if (promise && !existingQueryIsPending && !existingQueryIsFetching && // Only hydrate if dehydration is newer than any existing data,
  // this is always true for new queries
  (dehydratedAt === void 0 || dehydratedAt > query.state.dataUpdatedAt)) {
  void query.fetch(void 0, {
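
Read together, the two hunks above add a second guard before hydrate replays a dehydrated promise: besides not being in 'pending' status, the existing query must not already have fetchStatus 'fetching'. A condensed TypeScript sketch of the new condition follows; the helper name is hypothetical, and the locals query, promise and dehydratedAt are the ones visible in the diff:

import type { Query } from '@tanstack/query-core'

// Hypothetical helper illustrating the guard added in 5.77.1.
function shouldReplayDehydratedPromise(
  query: Query | undefined,
  promise: Promise<unknown> | undefined,
  dehydratedAt: number | undefined,
): boolean {
  const existingQueryIsPending = query?.state.status === 'pending'
  // New in 5.77.1: also skip when the query is already fetching.
  const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'
  return (
    promise !== undefined &&
    !existingQueryIsPending &&
    !existingQueryIsFetching &&
    // Only hydrate if dehydration is newer than any existing data;
    // this is always true for new queries.
    (dehydratedAt === undefined || dehydratedAt > (query?.state.dataUpdatedAt ?? 0))
  )
}

The net effect is that hydration no longer calls query.fetch for a query that is already fetching on the client.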
@@ -1 +1 @@
- {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+B;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AA/EnB;AAgFE,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,UAAS,WAAM,YAAN,mBAAe,KAAK,eAAe,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AA5HnB;AA6HE,QAAM,iBACJ,QAAQ,6BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,4BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,0BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,yBACtC;AAEF,QAAM,qBACJ,QAAQ,wBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,uBACtC;AAEF,QAAM,gBACJ,QAAQ,mBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,kBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AAxKR;AAyKE,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,oBACJ,wCAAS,mBAAT,mBAAyB,sBACzB,YAAO,kBAAkB,EAAE,YAA3B,mBAAoC,oBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AAzLvD,QAAAA,KAAAC;AA0LI,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,QACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAtMrE,UAAAD,KAAAC;AAuMM,YAAM,WAAW,cAAU,gCAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,qCAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,0BAAyB,+BAAO,MAAM,YAAW;AAGvD,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,YACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":["_a","_b"]}
+ {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n !existingQueryIsFetching &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+B;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AA/EnB;AAgFE,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,UAAS,WAAM,YAAN,mBAAe,KAAK,eAAe,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AA5HnB;AA6HE,QAAM,iBACJ,QAAQ,6BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,4BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,0BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,yBACtC;AAEF,QAAM,qBACJ,QAAQ,wBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,uBACtC;AAEF,QAAM,gBACJ,QAAQ,mBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,kBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AAxKR;AAyKE,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,oBACJ,wCAAS,mBAAT,mBAAyB,sBACzB,YAAO,kBAAkB,EAAE,YAA3B,mBAAoC,oBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AAzLvD,QAAAA,KAAAC;AA0LI,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,QACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAtMrE,UAAAD,KAAAC;AAuMM,YAAM,WAAW,cAAU,gCAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,qCAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,0BAAyB,+BAAO,MAAM,YAAW;AACvD,YAAM,2BAA0B,+BAAO,MAAM,iBAAgB;AAG7D,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,YACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC,0BACD,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":["_a","_b"]}
@@ -94,6 +94,7 @@ function hydrate(client, dehydratedState, options) {
  const data = rawData === void 0 ? rawData : deserializeData(rawData);
  let query = queryCache.get(queryHash);
  const existingQueryIsPending = (query == null ? void 0 : query.state.status) === "pending";
+ const existingQueryIsFetching = (query == null ? void 0 : query.state.fetchStatus) === "fetching";
  if (query) {
  const hasNewerSyncData = syncData && // We only need this undefined check to handle older dehydration
  // payloads that might not have dehydratedAt
@@ -125,7 +126,7 @@ function hydrate(client, dehydratedState, options) {
  }
  );
  }
- if (promise && !existingQueryIsPending && // Only hydrate if dehydration is newer than any existing data,
+ if (promise && !existingQueryIsPending && !existingQueryIsFetching && // Only hydrate if dehydration is newer than any existing data,
  // this is always true for new queries
  (dehydratedAt === void 0 || dehydratedAt > query.state.dataUpdatedAt)) {
  void query.fetch(void 0, {
@@ -1 +1 @@
- {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;AAAA,SAAS,sBAAsB;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AA/EnB;AAgFE,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,UAAS,WAAM,YAAN,mBAAe,KAAK,eAAe,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AA5HnB;AA6HE,QAAM,iBACJ,QAAQ,6BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,4BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,0BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,yBACtC;AAEF,QAAM,qBACJ,QAAQ,wBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,uBACtC;AAEF,QAAM,gBACJ,QAAQ,mBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,kBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AAxKR;AAyKE,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,oBACJ,wCAAS,mBAAT,mBAAyB,sBACzB,YAAO,kBAAkB,EAAE,YAA3B,mBAAoC,oBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AAzLvD,QAAAA,KAAAC;AA0LI,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,QACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAtMrE,UAAAD,KAAAC;AAuMM,YAAM,WAAW,UAAU,eAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,qCAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,0BAAyB,+BAAO,MAAM,YAAW;AAGvD,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,YACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":["_a","_b"]}
+ {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n !existingQueryIsFetching &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;AAAA,SAAS,sBAAsB;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AA/EnB;AAgFE,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,UAAS,WAAM,YAAN,mBAAe,KAAK,eAAe,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AA5HnB;AA6HE,QAAM,iBACJ,QAAQ,6BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,4BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,0BACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,yBACtC;AAEF,QAAM,qBACJ,QAAQ,wBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,uBACtC;AAEF,QAAM,gBACJ,QAAQ,mBACR,YAAO,kBAAkB,EAAE,cAA3B,mBAAsC,kBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AAxKR;AAyKE,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,oBACJ,wCAAS,mBAAT,mBAAyB,sBACzB,YAAO,kBAAkB,EAAE,YAA3B,mBAAoC,oBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AAzLvD,QAAAA,KAAAC;AA0LI,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,QACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAtMrE,UAAAD,KAAAC;AAuMM,YAAM,WAAW,UAAU,eAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,qCAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,0BAAyB,+BAAO,MAAM,YAAW;AACvD,YAAM,2BAA0B,+BAAO,MAAM,iBAAgB;AAG7D,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,IAAGD,MAAA,OAAO,kBAAkB,EAAE,YAA3B,gBAAAA,IAAoC;AAAA,YACvC,IAAGC,MAAA,mCAAS,mBAAT,gBAAAA,IAAyB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC,0BACD,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":["_a","_b"]}
@@ -23,9 +23,11 @@ __export(streamedQuery_exports, {
  streamedQuery: () => streamedQuery
  });
  module.exports = __toCommonJS(streamedQuery_exports);
+ var import_utils = require("./utils.cjs");
  function streamedQuery({
  queryFn,
- refetchMode = "reset"
+ refetchMode = "reset",
+ maxChunks
  }) {
  return async (context) => {
  const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -38,7 +40,7 @@ function streamedQuery({
  fetchStatus: "fetching"
  });
  }
- const result = [];
+ let result = [];
  const stream = await queryFn(context);
  for await (const chunk of stream) {
  if (context.signal.aborted) {
@@ -48,11 +50,11 @@ function streamedQuery({
  context.client.setQueryData(
  context.queryKey,
  (prev = []) => {
- return prev.concat([chunk]);
+ return (0, import_utils.addToEnd)(prev, chunk, maxChunks);
  }
  );
  }
- result.push(chunk);
+ result = (0, import_utils.addToEnd)(result, chunk, maxChunks);
  }
  if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
  context.client.setQueryData(context.queryKey, result);
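
The hunks above replace the unbounded prev.concat([chunk]) and result.push(chunk) with calls to addToEnd(prev, chunk, maxChunks) imported from ./utils.cjs. Based on the documented maxChunks behaviour (see the type declarations below), the bounded append works roughly like this sketch, which is not the utility's source verbatim:

// Sketch: append an item and, when a positive `max` would be exceeded,
// drop the oldest entry. A `max` of 0 or undefined means unlimited.
function addToEnd<T>(items: Array<T>, item: T, max = 0): Array<T> {
  const newItems = [...items, item]
  return max && newItems.length > max ? newItems.slice(1) : newItems
}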
@@ -1 +1 @@
- {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write the data to the cache at the end of the stream.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n const result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return prev.concat([chunk])\n },\n )\n }\n result.push(chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAaO,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAKkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,UAAM,SAA8B,CAAC;AACrC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,KAAK,OAAO,CAAC,KAAK,CAAC;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
+ {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param maxChunks - The maximum number of chunks to keep in the cache.\n * Defaults to `undefined`, meaning all chunks will be kept.\n * If `undefined` or `0`, the number of chunks is unlimited.\n * If the number of chunks exceeds this number, the oldest chunk will be removed.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n maxChunks,\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n maxChunks?: number\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return addToEnd(prev, chunk, maxChunks)\n },\n )\n }\n result = addToEnd(result, chunk, maxChunks)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AAkBlB,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AACF,GAMkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAA8B,CAAC;AACnC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,uBAAO,uBAAS,MAAM,OAAO,SAAS;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AACA,mBAAS,uBAAS,QAAQ,OAAO,SAAS;AAAA,IAC5C;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
@@ -11,11 +11,16 @@ import './subscribable.cjs';
  * @param refetchMode - Defines how re-fetches are handled.
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
  * Set to `'append'` to append new data to the existing data.
- * Set to `'replace'` to write the data to the cache at the end of the stream.
+ * Set to `'replace'` to write all data to the cache once the stream ends.
+ * @param maxChunks - The maximum number of chunks to keep in the cache.
+ * Defaults to `undefined`, meaning all chunks will be kept.
+ * If `undefined` or `0`, the number of chunks is unlimited.
+ * If the number of chunks exceeds this number, the oldest chunk will be removed.
  */
- declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, }: {
+ declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, maxChunks, }: {
  queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
  refetchMode?: 'append' | 'reset' | 'replace';
+ maxChunks?: number;
  }): QueryFunction<Array<TQueryFnData>, TQueryKey>;

  export { streamedQuery };
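
The updated declaration makes maxChunks part of the public streamedQuery options. A brief usage sketch follows, assuming streamedQuery is re-exported from the package entry point; the query key and the stream are made up for illustration:

import { QueryClient, streamedQuery } from '@tanstack/query-core'

const client = new QueryClient()

// Keep only the 50 most recent chunks in the cache while appending on refetch.
void client.fetchQuery({
  queryKey: ['chat', 'room-1'],
  queryFn: streamedQuery({
    queryFn: async function* () {
      yield { text: 'hello' } // stand-in for chunks from a real server stream
    },
    refetchMode: 'append',
    maxChunks: 50,
  }),
})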
@@ -11,11 +11,16 @@ import './subscribable.js';
11
11
  * @param refetchMode - Defines how re-fetches are handled.
12
12
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
13
13
  * Set to `'append'` to append new data to the existing data.
14
- * Set to `'replace'` to write the data to the cache at the end of the stream.
14
+ * Set to `'replace'` to write all data to the cache once the stream ends.
15
+ * @param maxChunks - The maximum number of chunks to keep in the cache.
16
+ * Defaults to `undefined`, meaning all chunks will be kept.
17
+ * If `undefined` or `0`, the number of chunks is unlimited.
18
+ * If the number of chunks exceeds this number, the oldest chunk will be removed.
15
19
  */
16
- declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, }: {
20
+ declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, maxChunks, }: {
17
21
  queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
18
22
  refetchMode?: 'append' | 'reset' | 'replace';
23
+ maxChunks?: number;
19
24
  }): QueryFunction<Array<TQueryFnData>, TQueryKey>;
20
25
 
21
26
  export { streamedQuery };
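
Both declaration files pick up the same addition: an optional `maxChunks` field on the options object. A minimal usage sketch of the new option, assuming `streamedQuery` is consumed from the package entry point (the React adapter re-exports it as `experimental_streamedQuery`); the async generator, the query key, and the limit of 100 below are illustrative only, not part of the package:

import { QueryClient, streamedQuery } from '@tanstack/query-core'

// Hypothetical chunk source: yields tokens until the stream ends or the query is cancelled.
async function* fetchTokens(signal: AbortSignal): AsyncIterable<string> {
  for (const token of ['streamed', 'query', 'demo']) {
    if (signal.aborted) return
    yield token
  }
}

const client = new QueryClient()

// Keep at most 100 chunks in the cache; once the limit is exceeded, the oldest chunk is dropped.
void client.fetchQuery({
  queryKey: ['tokens'],
  queryFn: streamedQuery({
    queryFn: ({ signal }) => fetchTokens(signal),
    refetchMode: 'append',
    maxChunks: 100,
  }),
})

With `refetchMode: 'append'`, a refetch keeps appending to the previously cached chunks, so `maxChunks` is what keeps the cached array from growing without bound.
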
@@ -1,9 +1,11 @@
1
1
  import "./chunk-PXG64RU4.js";
2
2
 
3
3
  // src/streamedQuery.ts
4
+ import { addToEnd } from "./utils.js";
4
5
  function streamedQuery({
5
6
  queryFn,
6
- refetchMode = "reset"
7
+ refetchMode = "reset",
8
+ maxChunks
7
9
  }) {
8
10
  return async (context) => {
9
11
  const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -16,7 +18,7 @@ function streamedQuery({
16
18
  fetchStatus: "fetching"
17
19
  });
18
20
  }
19
- const result = [];
21
+ let result = [];
20
22
  const stream = await queryFn(context);
21
23
  for await (const chunk of stream) {
22
24
  if (context.signal.aborted) {
@@ -26,11 +28,11 @@ function streamedQuery({
26
28
  context.client.setQueryData(
27
29
  context.queryKey,
28
30
  (prev = []) => {
29
- return prev.concat([chunk]);
31
+ return addToEnd(prev, chunk, maxChunks);
30
32
  }
31
33
  );
32
34
  }
33
- result.push(chunk);
35
+ result = addToEnd(result, chunk, maxChunks);
34
36
  }
35
37
  if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
36
38
  context.client.setQueryData(context.queryKey, result);
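
The runtime change in both the ESM and CJS builds is the same: `prev.concat([chunk])` and `result.push(chunk)` are replaced by `addToEnd(prev, chunk, maxChunks)` from the package's internal utils, the helper infinite queries already use for `maxPages`. Its body is not part of this diff; the following is only an approximate, behavior-equivalent sketch of the bounded append it is relied on for:

// Approximate sketch of the addToEnd helper (the real implementation lives in
// src/utils.ts and is not shown in this diff): append the new item, and when a
// positive max is set and exceeded, drop items from the front so at most `max` remain.
function addToEnd<T>(items: Array<T>, item: T, max = 0): Array<T> {
  const newItems = [...items, item]
  // a max of 0 (or undefined, via the default) means "unlimited"
  return max && newItems.length > max ? newItems.slice(-max) : newItems
}

// With maxChunks = 2, the oldest chunk falls off the front:
// addToEnd(addToEnd(addToEnd([], 'a', 2), 'b', 2), 'c', 2) yields ['b', 'c']

Switching `result` from `const` to `let` follows from this: `addToEnd` returns a new array rather than mutating in place, so the local accumulator is reassigned on every chunk.
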
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write the data to the cache at the end of the stream.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n const result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return prev.concat([chunk])\n },\n )\n }\n result.push(chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;AAaO,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAKkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,UAAM,SAA8B,CAAC;AACrC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,KAAK,OAAO,CAAC,KAAK,CAAC;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param maxChunks - The maximum number of chunks to keep in the cache.\n * Defaults to `undefined`, meaning all chunks will be kept.\n * If `undefined` or `0`, the number of chunks is unlimited.\n * If the number of chunks exceeds this number, the oldest chunk will be removed.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n maxChunks,\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n maxChunks?: number\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return addToEnd(prev, chunk, maxChunks)\n },\n )\n }\n result = addToEnd(result, chunk, maxChunks)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n 
}\n}\n"],"mappings":";;;AAAA,SAAS,gBAAgB;AAkBlB,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AACF,GAMkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAA8B,CAAC;AACnC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,SAAS,MAAM,OAAO,SAAS;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,QAAQ,OAAO,SAAS;AAAA,IAC5C;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
@@ -114,6 +114,7 @@ function hydrate(client, dehydratedState, options) {
114
114
  const data = rawData === void 0 ? rawData : deserializeData(rawData);
115
115
  let query = queryCache.get(queryHash);
116
116
  const existingQueryIsPending = query?.state.status === "pending";
117
+ const existingQueryIsFetching = query?.state.fetchStatus === "fetching";
117
118
  if (query) {
118
119
  const hasNewerSyncData = syncData && // We only need this undefined check to handle older dehydration
119
120
  // payloads that might not have dehydratedAt
@@ -145,7 +146,7 @@ function hydrate(client, dehydratedState, options) {
145
146
  }
146
147
  );
147
148
  }
148
- if (promise && !existingQueryIsPending && // Only hydrate if dehydration is newer than any existing data,
149
+ if (promise && !existingQueryIsPending && !existingQueryIsFetching && // Only hydrate if dehydration is newer than any existing data,
149
150
  // this is always true for new queries
150
151
  (dehydratedAt === void 0 || dehydratedAt > query.state.dataUpdatedAt)) {
151
152
  void query.fetch(void 0, {
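
The hydration change is the extra `existingQueryIsFetching` guard: previously only `status === "pending"` was checked, so a client-side query that already had data but was in the middle of a refetch could still get a retryer seeded from the dehydrated promise. A rough sketch of the situation the new check covers, with a made-up query key and query functions (an illustration of the guard, not a reproduction of the original issue):

import {
  QueryClient,
  dehydrate,
  defaultShouldDehydrateQuery,
  hydrate,
} from '@tanstack/query-core'

async function example() {
  // Server (sketch): dehydrate while the query is still pending, so the
  // dehydrated state carries a promise alongside the query state.
  const serverClient = new QueryClient()
  void serverClient.prefetchQuery({
    queryKey: ['user'],
    queryFn: () => Promise.resolve({ name: 'server' }),
  })
  const dehydratedState = dehydrate(serverClient, {
    shouldDehydrateQuery: (query) =>
      defaultShouldDehydrateQuery(query) || query.state.status === 'pending',
  })

  // Client: the same query already has data and is currently refetching...
  const client = new QueryClient()
  await client.prefetchQuery({
    queryKey: ['user'],
    queryFn: () => Promise.resolve({ name: 'client' }),
  })
  void client.refetchQueries({ queryKey: ['user'] })

  // ...so with the added fetchStatus check, hydrate() will not also call
  // query.fetch() with the dehydrated promise as initialPromise while that
  // refetch is in flight.
  hydrate(client, dehydratedState)
}

void example()
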
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+B;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AACjB,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,SAAS,MAAM,SAAS,KAAK,aAAa,EAAE,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C,CAAC;AAAA,IACH;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AACjB,QAAM,iBACJ,QAAQ,2BACR,OAAO,kBAAkB,EAAE,WAAW,2BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,wBACR,OAAO,kBAAkB,EAAE,WAAW,wBACtC;AAEF,QAAM,qBACJ,QAAQ,sBACR,OAAO,kBAAkB,EAAE,WAAW,sBACtC;AAEF,QAAM,gBACJ,QAAQ,iBACR,OAAO,kBAAkB,EAAE,WAAW,iBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AACN,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,kBACJ,SAAS,gBAAgB,mBACzB,OAAO,kBAAkB,EAAE,SAAS,mBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AACnD,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,QACvC,GAAG,SAAS,gBAAgB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAC/D,YAAM,WAAW,cAAU,gCAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,UAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,yBAAyB,OAAO,MAAM,WAAW;AAGvD,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,YACvC,GAAG,SAAS,gBAAgB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n !existingQueryIsFetching &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+B;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AACjB,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,SAAS,MAAM,SAAS,KAAK,aAAa,EAAE,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C,CAAC;AAAA,IACH;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AACjB,QAAM,iBACJ,QAAQ,2BACR,OAAO,kBAAkB,EAAE,WAAW,2BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,wBACR,OAAO,kBAAkB,EAAE,WAAW,wBACtC;AAEF,QAAM,qBACJ,QAAQ,sBACR,OAAO,kBAAkB,EAAE,WAAW,sBACtC;AAEF,QAAM,gBACJ,QAAQ,iBACR,OAAO,kBAAkB,EAAE,WAAW,iBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AACN,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,kBACJ,SAAS,gBAAgB,mBACzB,OAAO,kBAAkB,EAAE,SAAS,mBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AACnD,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,QACvC,GAAG,SAAS,gBAAgB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAC/D,YAAM,WAAW,cAAU,gCAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,UAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,yBAAyB,OAAO,MAAM,WAAW;AACvD,YAAM,0BAA0B,OAAO,MAAM,gBAAgB;AAG7D,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,YACvC,GAAG,SAAS,gBAAgB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC,0BACD,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -87,6 +87,7 @@ function hydrate(client, dehydratedState, options) {
87
87
  const data = rawData === void 0 ? rawData : deserializeData(rawData);
88
88
  let query = queryCache.get(queryHash);
89
89
  const existingQueryIsPending = query?.state.status === "pending";
90
+ const existingQueryIsFetching = query?.state.fetchStatus === "fetching";
90
91
  if (query) {
91
92
  const hasNewerSyncData = syncData && // We only need this undefined check to handle older dehydration
92
93
  // payloads that might not have dehydratedAt
@@ -118,7 +119,7 @@ function hydrate(client, dehydratedState, options) {
118
119
  }
119
120
  );
120
121
  }
121
- if (promise && !existingQueryIsPending && // Only hydrate if dehydration is newer than any existing data,
122
+ if (promise && !existingQueryIsPending && !existingQueryIsFetching && // Only hydrate if dehydration is newer than any existing data,
122
123
  // this is always true for new queries
123
124
  (dehydratedAt === void 0 || dehydratedAt > query.state.dataUpdatedAt)) {
124
125
  void query.fetch(void 0, {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";AAAA,SAAS,sBAAsB;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AACjB,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,SAAS,MAAM,SAAS,KAAK,aAAa,EAAE,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C,CAAC;AAAA,IACH;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AACjB,QAAM,iBACJ,QAAQ,2BACR,OAAO,kBAAkB,EAAE,WAAW,2BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,wBACR,OAAO,kBAAkB,EAAE,WAAW,wBACtC;AAEF,QAAM,qBACJ,QAAQ,sBACR,OAAO,kBAAkB,EAAE,WAAW,sBACtC;AAEF,QAAM,gBACJ,QAAQ,iBACR,OAAO,kBAAkB,EAAE,WAAW,iBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AACN,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,kBACJ,SAAS,gBAAgB,mBACzB,OAAO,kBAAkB,EAAE,SAAS,mBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AACnD,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,QACvC,GAAG,SAAS,gBAAgB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAC/D,YAAM,WAAW,UAAU,eAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,UAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,yBAAyB,OAAO,MAAM,WAAW;AAGvD,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,YACvC,GAAG,SAAS,gBAAgB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/hydration.ts"],"sourcesContent":["import { tryResolveSync } from './thenable'\nimport type {\n DefaultError,\n MutationKey,\n MutationMeta,\n MutationOptions,\n MutationScope,\n QueryKey,\n QueryMeta,\n QueryOptions,\n} from './types'\nimport type { QueryClient } from './queryClient'\nimport type { Query, QueryState } from './query'\nimport type { Mutation, MutationState } from './mutation'\n\n// TYPES\ntype TransformerFn = (data: any) => any\nfunction defaultTransformerFn(data: any): any {\n return data\n}\n\nexport interface DehydrateOptions {\n serializeData?: TransformerFn\n shouldDehydrateMutation?: (mutation: Mutation) => boolean\n shouldDehydrateQuery?: (query: Query) => boolean\n shouldRedactErrors?: (error: unknown) => boolean\n}\n\nexport interface HydrateOptions {\n defaultOptions?: {\n deserializeData?: TransformerFn\n queries?: QueryOptions\n mutations?: MutationOptions<unknown, DefaultError, unknown, unknown>\n }\n}\n\ninterface DehydratedMutation {\n mutationKey?: MutationKey\n state: MutationState\n meta?: MutationMeta\n scope?: MutationScope\n}\n\ninterface DehydratedQuery {\n queryHash: string\n queryKey: QueryKey\n state: QueryState\n promise?: Promise<unknown>\n meta?: QueryMeta\n // This is only optional because older versions of Query might have dehydrated\n // without it which we need to handle for backwards compatibility.\n // This should be changed to required in the future.\n dehydratedAt?: number\n}\n\nexport interface DehydratedState {\n mutations: Array<DehydratedMutation>\n queries: Array<DehydratedQuery>\n}\n\n// FUNCTIONS\n\nfunction dehydrateMutation(mutation: Mutation): DehydratedMutation {\n return {\n mutationKey: mutation.options.mutationKey,\n state: mutation.state,\n ...(mutation.options.scope && { scope: mutation.options.scope }),\n ...(mutation.meta && { meta: mutation.meta }),\n }\n}\n\n// Most config is not dehydrated but instead meant to configure again when\n// consuming the de/rehydrated data, typically with useQuery on the client.\n// Sometimes it might make sense to prefetch data on the server and include\n// in the html-payload, but not consume it on the initial render.\nfunction dehydrateQuery(\n query: Query,\n serializeData: TransformerFn,\n shouldRedactErrors: (error: unknown) => boolean,\n): DehydratedQuery {\n return {\n dehydratedAt: Date.now(),\n state: {\n ...query.state,\n ...(query.state.data !== undefined && {\n data: serializeData(query.state.data),\n }),\n },\n queryKey: query.queryKey,\n queryHash: query.queryHash,\n ...(query.state.status === 'pending' && {\n promise: query.promise?.then(serializeData).catch((error) => {\n if (!shouldRedactErrors(error)) {\n // Reject original error if it should not be redacted\n return Promise.reject(error)\n }\n // If not in production, log original error before rejecting redacted error\n if (process.env.NODE_ENV !== 'production') {\n console.error(\n `A query that was dehydrated as pending ended up rejecting. 
[${query.queryHash}]: ${error}; The error will be redacted in production builds`,\n )\n }\n return Promise.reject(new Error('redacted'))\n }),\n }),\n ...(query.meta && { meta: query.meta }),\n }\n}\n\nexport function defaultShouldDehydrateMutation(mutation: Mutation) {\n return mutation.state.isPaused\n}\n\nexport function defaultShouldDehydrateQuery(query: Query) {\n return query.state.status === 'success'\n}\n\nfunction defaultShouldRedactErrors(_: unknown) {\n return true\n}\n\nexport function dehydrate(\n client: QueryClient,\n options: DehydrateOptions = {},\n): DehydratedState {\n const filterMutation =\n options.shouldDehydrateMutation ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateMutation ??\n defaultShouldDehydrateMutation\n\n const mutations = client\n .getMutationCache()\n .getAll()\n .flatMap((mutation) =>\n filterMutation(mutation) ? [dehydrateMutation(mutation)] : [],\n )\n\n const filterQuery =\n options.shouldDehydrateQuery ??\n client.getDefaultOptions().dehydrate?.shouldDehydrateQuery ??\n defaultShouldDehydrateQuery\n\n const shouldRedactErrors =\n options.shouldRedactErrors ??\n client.getDefaultOptions().dehydrate?.shouldRedactErrors ??\n defaultShouldRedactErrors\n\n const serializeData =\n options.serializeData ??\n client.getDefaultOptions().dehydrate?.serializeData ??\n defaultTransformerFn\n\n const queries = client\n .getQueryCache()\n .getAll()\n .flatMap((query) =>\n filterQuery(query)\n ? [dehydrateQuery(query, serializeData, shouldRedactErrors)]\n : [],\n )\n\n return { mutations, queries }\n}\n\nexport function hydrate(\n client: QueryClient,\n dehydratedState: unknown,\n options?: HydrateOptions,\n): void {\n if (typeof dehydratedState !== 'object' || dehydratedState === null) {\n return\n }\n\n const mutationCache = client.getMutationCache()\n const queryCache = client.getQueryCache()\n const deserializeData =\n options?.defaultOptions?.deserializeData ??\n client.getDefaultOptions().hydrate?.deserializeData ??\n defaultTransformerFn\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const mutations = (dehydratedState as DehydratedState).mutations || []\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const queries = (dehydratedState as DehydratedState).queries || []\n\n mutations.forEach(({ state, ...mutationOptions }) => {\n mutationCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.mutations,\n ...options?.defaultOptions?.mutations,\n ...mutationOptions,\n },\n state,\n )\n })\n\n queries.forEach(\n ({ queryKey, state, queryHash, meta, promise, dehydratedAt }) => {\n const syncData = promise ? tryResolveSync(promise) : undefined\n const rawData = state.data === undefined ? syncData?.data : state.data\n const data = rawData === undefined ? 
rawData : deserializeData(rawData)\n\n let query = queryCache.get(queryHash)\n const existingQueryIsPending = query?.state.status === 'pending'\n const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'\n\n // Do not hydrate if an existing query exists with newer data\n if (query) {\n const hasNewerSyncData =\n syncData &&\n // We only need this undefined check to handle older dehydration\n // payloads that might not have dehydratedAt\n dehydratedAt !== undefined &&\n dehydratedAt > query.state.dataUpdatedAt\n if (\n state.dataUpdatedAt > query.state.dataUpdatedAt ||\n hasNewerSyncData\n ) {\n // omit fetchStatus from dehydrated state\n // so that query stays in its current fetchStatus\n const { fetchStatus: _ignored, ...serializedState } = state\n query.setState({\n ...serializedState,\n data,\n })\n }\n } else {\n // Restore query\n query = queryCache.build(\n client,\n {\n ...client.getDefaultOptions().hydrate?.queries,\n ...options?.defaultOptions?.queries,\n queryKey,\n queryHash,\n meta,\n },\n // Reset fetch status to idle to avoid\n // query being stuck in fetching state upon hydration\n {\n ...state,\n data,\n fetchStatus: 'idle',\n status: data !== undefined ? 'success' : state.status,\n },\n )\n }\n\n if (\n promise &&\n !existingQueryIsPending &&\n !existingQueryIsFetching &&\n // Only hydrate if dehydration is newer than any existing data,\n // this is always true for new queries\n (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)\n ) {\n // This doesn't actually fetch - it just creates a retryer\n // which will re-use the passed `initialPromise`\n // Note that we need to call these even when data was synchronously\n // available, as we still need to set up the retryer\n void query.fetch(undefined, {\n // RSC transformed promises are not thenable\n initialPromise: Promise.resolve(promise).then(deserializeData),\n })\n }\n },\n 
)\n}\n"],"mappings":";AAAA,SAAS,sBAAsB;AAiB/B,SAAS,qBAAqB,MAAgB;AAC5C,SAAO;AACT;AA2CA,SAAS,kBAAkB,UAAwC;AACjE,SAAO;AAAA,IACL,aAAa,SAAS,QAAQ;AAAA,IAC9B,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,QAAQ,SAAS,EAAE,OAAO,SAAS,QAAQ,MAAM;AAAA,IAC9D,GAAI,SAAS,QAAQ,EAAE,MAAM,SAAS,KAAK;AAAA,EAC7C;AACF;AAMA,SAAS,eACP,OACA,eACA,oBACiB;AACjB,SAAO;AAAA,IACL,cAAc,KAAK,IAAI;AAAA,IACvB,OAAO;AAAA,MACL,GAAG,MAAM;AAAA,MACT,GAAI,MAAM,MAAM,SAAS,UAAa;AAAA,QACpC,MAAM,cAAc,MAAM,MAAM,IAAI;AAAA,MACtC;AAAA,IACF;AAAA,IACA,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,GAAI,MAAM,MAAM,WAAW,aAAa;AAAA,MACtC,SAAS,MAAM,SAAS,KAAK,aAAa,EAAE,MAAM,CAAC,UAAU;AAC3D,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAE9B,iBAAO,QAAQ,OAAO,KAAK;AAAA,QAC7B;AAEA,YAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAQ;AAAA,YACN,+DAA+D,MAAM,SAAS,MAAM,KAAK;AAAA,UAC3F;AAAA,QACF;AACA,eAAO,QAAQ,OAAO,IAAI,MAAM,UAAU,CAAC;AAAA,MAC7C,CAAC;AAAA,IACH;AAAA,IACA,GAAI,MAAM,QAAQ,EAAE,MAAM,MAAM,KAAK;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,UAAoB;AACjE,SAAO,SAAS,MAAM;AACxB;AAEO,SAAS,4BAA4B,OAAc;AACxD,SAAO,MAAM,MAAM,WAAW;AAChC;AAEA,SAAS,0BAA0B,GAAY;AAC7C,SAAO;AACT;AAEO,SAAS,UACd,QACA,UAA4B,CAAC,GACZ;AACjB,QAAM,iBACJ,QAAQ,2BACR,OAAO,kBAAkB,EAAE,WAAW,2BACtC;AAEF,QAAM,YAAY,OACf,iBAAiB,EACjB,OAAO,EACP;AAAA,IAAQ,CAAC,aACR,eAAe,QAAQ,IAAI,CAAC,kBAAkB,QAAQ,CAAC,IAAI,CAAC;AAAA,EAC9D;AAEF,QAAM,cACJ,QAAQ,wBACR,OAAO,kBAAkB,EAAE,WAAW,wBACtC;AAEF,QAAM,qBACJ,QAAQ,sBACR,OAAO,kBAAkB,EAAE,WAAW,sBACtC;AAEF,QAAM,gBACJ,QAAQ,iBACR,OAAO,kBAAkB,EAAE,WAAW,iBACtC;AAEF,QAAM,UAAU,OACb,cAAc,EACd,OAAO,EACP;AAAA,IAAQ,CAAC,UACR,YAAY,KAAK,IACb,CAAC,eAAe,OAAO,eAAe,kBAAkB,CAAC,IACzD,CAAC;AAAA,EACP;AAEF,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAEO,SAAS,QACd,QACA,iBACA,SACM;AACN,MAAI,OAAO,oBAAoB,YAAY,oBAAoB,MAAM;AACnE;AAAA,EACF;AAEA,QAAM,gBAAgB,OAAO,iBAAiB;AAC9C,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,kBACJ,SAAS,gBAAgB,mBACzB,OAAO,kBAAkB,EAAE,SAAS,mBACpC;AAGF,QAAM,YAAa,gBAAoC,aAAa,CAAC;AAErE,QAAM,UAAW,gBAAoC,WAAW,CAAC;AAEjE,YAAU,QAAQ,CAAC,EAAE,OAAO,GAAG,gBAAgB,MAAM;AACnD,kBAAc;AAAA,MACZ;AAAA,MACA;AAAA,QACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,QACvC,GAAG,SAAS,gBAAgB;AAAA,QAC5B,GAAG;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAED,UAAQ;AAAA,IACN,CAAC,EAAE,UAAU,OAAO,WAAW,MAAM,SAAS,aAAa,MAAM;AAC/D,YAAM,WAAW,UAAU,eAAe,OAAO,IAAI;AACrD,YAAM,UAAU,MAAM,SAAS,SAAY,UAAU,OAAO,MAAM;AAClE,YAAM,OAAO,YAAY,SAAY,UAAU,gBAAgB,OAAO;AAEtE,UAAI,QAAQ,WAAW,IAAI,SAAS;AACpC,YAAM,yBAAyB,OAAO,MAAM,WAAW;AACvD,YAAM,0BAA0B,OAAO,MAAM,gBAAgB;AAG7D,UAAI,OAAO;AACT,cAAM,mBACJ;AAAA;AAAA,QAGA,iBAAiB,UACjB,eAAe,MAAM,MAAM;AAC7B,YACE,MAAM,gBAAgB,MAAM,MAAM,iBAClC,kBACA;AAGA,gBAAM,EAAE,aAAa,UAAU,GAAG,gBAAgB,IAAI;AACtD,gBAAM,SAAS;AAAA,YACb,GAAG;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AAEL,gBAAQ,WAAW;AAAA,UACjB;AAAA,UACA;AAAA,YACE,GAAG,OAAO,kBAAkB,EAAE,SAAS;AAAA,YACvC,GAAG,SAAS,gBAAgB;AAAA,YAC5B;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA;AAAA;AAAA,UAGA;AAAA,YACE,GAAG;AAAA,YACH;AAAA,YACA,aAAa;AAAA,YACb,QAAQ,SAAS,SAAY,YAAY,MAAM;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAEA,UACE,WACA,CAAC,0BACD,CAAC;AAAA;AAAA,OAGA,iBAAiB,UAAa,eAAe,MAAM,MAAM,gBAC1D;AAKA,aAAK,MAAM,MAAM,QAAW;AAAA;AAAA,UAE1B,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,KAAK,eAAe;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -23,9 +23,11 @@ __export(streamedQuery_exports, {
23
23
  streamedQuery: () => streamedQuery
24
24
  });
25
25
  module.exports = __toCommonJS(streamedQuery_exports);
26
+ var import_utils = require("./utils.cjs");
26
27
  function streamedQuery({
27
28
  queryFn,
28
- refetchMode = "reset"
29
+ refetchMode = "reset",
30
+ maxChunks
29
31
  }) {
30
32
  return async (context) => {
31
33
  const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -38,7 +40,7 @@ function streamedQuery({
38
40
  fetchStatus: "fetching"
39
41
  });
40
42
  }
41
- const result = [];
43
+ let result = [];
42
44
  const stream = await queryFn(context);
43
45
  for await (const chunk of stream) {
44
46
  if (context.signal.aborted) {
@@ -48,11 +50,11 @@ function streamedQuery({
48
50
  context.client.setQueryData(
49
51
  context.queryKey,
50
52
  (prev = []) => {
51
- return prev.concat([chunk]);
53
+ return (0, import_utils.addToEnd)(prev, chunk, maxChunks);
52
54
  }
53
55
  );
54
56
  }
55
- result.push(chunk);
57
+ result = (0, import_utils.addToEnd)(result, chunk, maxChunks);
56
58
  }
57
59
  if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
58
60
  context.client.setQueryData(context.queryKey, result);
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write the data to the cache at the end of the stream.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n const result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return prev.concat([chunk])\n },\n )\n }\n result.push(chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAaO,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAKkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,UAAM,SAA8B,CAAC;AACrC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,KAAK,OAAO,CAAC,KAAK,CAAC;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param maxChunks - The maximum number of chunks to keep in the cache.\n * Defaults to `undefined`, meaning all chunks will be kept.\n * If `undefined` or `0`, the number of chunks is unlimited.\n * If the number of chunks exceeds this number, the oldest chunk will be removed.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n maxChunks,\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n maxChunks?: number\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return addToEnd(prev, chunk, maxChunks)\n },\n )\n }\n result = addToEnd(result, chunk, maxChunks)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AAkBlB,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AACF,GAMkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAA8B,CAAC;AACnC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,uBAAO,uBAAS,MAAM,OAAO,SAAS;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AACA,mBAAS,uBAAS,QAAQ,OAAO,SAAS;AAAA,IAC5C;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
@@ -11,11 +11,16 @@ import './subscribable.cjs';
11
11
  * @param refetchMode - Defines how re-fetches are handled.
12
12
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
13
13
  * Set to `'append'` to append new data to the existing data.
14
- * Set to `'replace'` to write the data to the cache at the end of the stream.
14
+ * Set to `'replace'` to write all data to the cache once the stream ends.
15
+ * @param maxChunks - The maximum number of chunks to keep in the cache.
16
+ * Defaults to `undefined`, meaning all chunks will be kept.
17
+ * If `undefined` or `0`, the number of chunks is unlimited.
18
+ * If the number of chunks exceeds this number, the oldest chunk will be removed.
15
19
  */
16
- declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, }: {
20
+ declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, maxChunks, }: {
17
21
  queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
18
22
  refetchMode?: 'append' | 'reset' | 'replace';
23
+ maxChunks?: number;
19
24
  }): QueryFunction<Array<TQueryFnData>, TQueryKey>;
20
25
 
21
26
  export { streamedQuery };
@@ -11,11 +11,16 @@ import './subscribable.js';
11
11
  * @param refetchMode - Defines how re-fetches are handled.
12
12
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
13
13
  * Set to `'append'` to append new data to the existing data.
14
- * Set to `'replace'` to write the data to the cache at the end of the stream.
14
+ * Set to `'replace'` to write all data to the cache once the stream ends.
15
+ * @param maxChunks - The maximum number of chunks to keep in the cache.
16
+ * Defaults to `undefined`, meaning all chunks will be kept.
17
+ * If `undefined` or `0`, the number of chunks is unlimited.
18
+ * If the number of chunks exceeds this number, the oldest chunk will be removed.
15
19
  */
16
- declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, }: {
20
+ declare function streamedQuery<TQueryFnData = unknown, TQueryKey extends QueryKey = QueryKey>({ queryFn, refetchMode, maxChunks, }: {
17
21
  queryFn: (context: QueryFunctionContext<TQueryKey>) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>;
18
22
  refetchMode?: 'append' | 'reset' | 'replace';
23
+ maxChunks?: number;
19
24
  }): QueryFunction<Array<TQueryFnData>, TQueryKey>;
20
25
 
21
26
  export { streamedQuery };
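The CJS and ESM type declarations above both gain the optional maxChunks parameter. A minimal usage sketch follows; fetchTokenStream is a hypothetical helper returning an AsyncIterable, and the example assumes streamedQuery is importable from the package entry point (framework adapters may re-export it under an experimental_ prefix).

import { QueryClient } from '@tanstack/query-core'
// Assumption: streamedQuery is exported from the package entry; check your
// framework adapter, which may expose it as experimental_streamedQuery.
import { streamedQuery } from '@tanstack/query-core'

// Hypothetical helper that yields tokens as they arrive over the network.
declare function fetchTokenStream(signal: AbortSignal): AsyncIterable<string>

const client = new QueryClient()

void client.fetchQuery({
  queryKey: ['tokens'],
  queryFn: streamedQuery({
    queryFn: ({ signal }) => fetchTokenStream(signal),
    refetchMode: 'append',
    maxChunks: 100, // keep only the 100 most recent chunks in the cache
  }),
})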
@@ -1,7 +1,9 @@
1
1
  // src/streamedQuery.ts
2
+ import { addToEnd } from "./utils.js";
2
3
  function streamedQuery({
3
4
  queryFn,
4
- refetchMode = "reset"
5
+ refetchMode = "reset",
6
+ maxChunks
5
7
  }) {
6
8
  return async (context) => {
7
9
  const query = context.client.getQueryCache().find({ queryKey: context.queryKey, exact: true });
@@ -14,7 +16,7 @@ function streamedQuery({
14
16
  fetchStatus: "fetching"
15
17
  });
16
18
  }
17
- const result = [];
19
+ let result = [];
18
20
  const stream = await queryFn(context);
19
21
  for await (const chunk of stream) {
20
22
  if (context.signal.aborted) {
@@ -24,11 +26,11 @@ function streamedQuery({
24
26
  context.client.setQueryData(
25
27
  context.queryKey,
26
28
  (prev = []) => {
27
- return prev.concat([chunk]);
29
+ return addToEnd(prev, chunk, maxChunks);
28
30
  }
29
31
  );
30
32
  }
31
- result.push(chunk);
33
+ result = addToEnd(result, chunk, maxChunks);
32
34
  }
33
35
  if (isRefetch && refetchMode === "replace" && !context.signal.aborted) {
34
36
  context.client.setQueryData(context.queryKey, result);
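Because addToEnd returns a new array instead of mutating in place, the local accumulator in the ESM build changes from a const that is pushed to into a let that is reassigned on every chunk. The self-contained demonstration below shows how a capped accumulator evolves while streaming, assuming the addToEnd semantics sketched earlier.

// Self-contained demonstration of the trimming behavior during a stream,
// assuming the addToEnd semantics sketched above (illustrative only).
function addToEnd<T>(items: Array<T>, item: T, max = 0): Array<T> {
  const next = [...items, item]
  return max && next.length > max ? next.slice(1) : next
}

async function* chunks() {
  yield 1
  yield 2
  yield 3
  yield 4
}

async function demo() {
  let result: Array<number> = []
  for await (const chunk of chunks()) {
    result = addToEnd(result, chunk, 2) // maxChunks: 2
    console.log(result) // [1] -> [1, 2] -> [2, 3] -> [3, 4]
  }
}

void demo()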
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write the data to the cache at the end of the stream.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n const result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return prev.concat([chunk])\n },\n )\n }\n result.push(chunk)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n }\n}\n"],"mappings":";AAaO,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAChB,GAKkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,UAAM,SAA8B,CAAC;AACrC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,KAAK,OAAO,CAAC,KAAK,CAAC;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/streamedQuery.ts"],"sourcesContent":["import { addToEnd } from './utils'\nimport type { QueryFunction, QueryFunctionContext, QueryKey } from './types'\n\n/**\n * This is a helper function to create a query function that streams data from an AsyncIterable.\n * Data will be an Array of all the chunks received.\n * The query will be in a 'pending' state until the first chunk of data is received, but will go to 'success' after that.\n * The query will stay in fetchStatus 'fetching' until the stream ends.\n * @param queryFn - The function that returns an AsyncIterable to stream data from.\n * @param refetchMode - Defines how re-fetches are handled.\n * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.\n * Set to `'append'` to append new data to the existing data.\n * Set to `'replace'` to write all data to the cache once the stream ends.\n * @param maxChunks - The maximum number of chunks to keep in the cache.\n * Defaults to `undefined`, meaning all chunks will be kept.\n * If `undefined` or `0`, the number of chunks is unlimited.\n * If the number of chunks exceeds this number, the oldest chunk will be removed.\n */\nexport function streamedQuery<\n TQueryFnData = unknown,\n TQueryKey extends QueryKey = QueryKey,\n>({\n queryFn,\n refetchMode = 'reset',\n maxChunks,\n}: {\n queryFn: (\n context: QueryFunctionContext<TQueryKey>,\n ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>\n refetchMode?: 'append' | 'reset' | 'replace'\n maxChunks?: number\n}): QueryFunction<Array<TQueryFnData>, TQueryKey> {\n return async (context) => {\n const query = context.client\n .getQueryCache()\n .find({ queryKey: context.queryKey, exact: true })\n const isRefetch = !!query && query.state.data !== undefined\n\n if (isRefetch && refetchMode === 'reset') {\n query.setState({\n status: 'pending',\n data: undefined,\n error: null,\n fetchStatus: 'fetching',\n })\n }\n\n let result: Array<TQueryFnData> = []\n const stream = await queryFn(context)\n\n for await (const chunk of stream) {\n if (context.signal.aborted) {\n break\n }\n\n // don't append to the cache directly when replace-refetching\n if (!isRefetch || refetchMode !== 'replace') {\n context.client.setQueryData<Array<TQueryFnData>>(\n context.queryKey,\n (prev = []) => {\n return addToEnd(prev, chunk, maxChunks)\n },\n )\n }\n result = addToEnd(result, chunk, maxChunks)\n }\n\n // finalize result: replace-refetching needs to write to the cache\n if (isRefetch && refetchMode === 'replace' && !context.signal.aborted) {\n context.client.setQueryData<Array<TQueryFnData>>(context.queryKey, result)\n }\n\n return context.client.getQueryData(context.queryKey)!\n 
}\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AAkBlB,SAAS,cAGd;AAAA,EACA;AAAA,EACA,cAAc;AAAA,EACd;AACF,GAMkD;AAChD,SAAO,OAAO,YAAY;AACxB,UAAM,QAAQ,QAAQ,OACnB,cAAc,EACd,KAAK,EAAE,UAAU,QAAQ,UAAU,OAAO,KAAK,CAAC;AACnD,UAAM,YAAY,CAAC,CAAC,SAAS,MAAM,MAAM,SAAS;AAElD,QAAI,aAAa,gBAAgB,SAAS;AACxC,YAAM,SAAS;AAAA,QACb,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,OAAO;AAAA,QACP,aAAa;AAAA,MACf,CAAC;AAAA,IACH;AAEA,QAAI,SAA8B,CAAC;AACnC,UAAM,SAAS,MAAM,QAAQ,OAAO;AAEpC,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AAGA,UAAI,CAAC,aAAa,gBAAgB,WAAW;AAC3C,gBAAQ,OAAO;AAAA,UACb,QAAQ;AAAA,UACR,CAAC,OAAO,CAAC,MAAM;AACb,mBAAO,SAAS,MAAM,OAAO,SAAS;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,QAAQ,OAAO,SAAS;AAAA,IAC5C;AAGA,QAAI,aAAa,gBAAgB,aAAa,CAAC,QAAQ,OAAO,SAAS;AACrE,cAAQ,OAAO,aAAkC,QAAQ,UAAU,MAAM;AAAA,IAC3E;AAEA,WAAO,QAAQ,OAAO,aAAa,QAAQ,QAAQ;AAAA,EACrD;AACF;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tanstack/query-core",
3
- "version": "5.76.2",
3
+ "version": "5.77.1",
4
4
  "description": "The framework agnostic core that powers TanStack Query",
5
5
  "author": "tannerlinsley",
6
6
  "license": "MIT",
package/src/hydration.ts CHANGED
@@ -203,6 +203,7 @@ export function hydrate(
203
203
 
204
204
  let query = queryCache.get(queryHash)
205
205
  const existingQueryIsPending = query?.state.status === 'pending'
206
+ const existingQueryIsFetching = query?.state.fetchStatus === 'fetching'
206
207
 
207
208
  // Do not hydrate if an existing query exists with newer data
208
209
  if (query) {
@@ -249,6 +250,7 @@ export function hydrate(
249
250
  if (
250
251
  promise &&
251
252
  !existingQueryIsPending &&
253
+ !existingQueryIsFetching &&
252
254
  // Only hydrate if dehydration is newer than any existing data,
253
255
  // this is always true for new queries
254
256
  (dehydratedAt === undefined || dehydratedAt > query.state.dataUpdatedAt)
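The hydration change above guards the dehydrated-promise fetch on fetchStatus as well as status. The hedged sketch below (shouldSkipHydratedPromiseFetch is a hypothetical name, not part of the package) illustrates why the extra check matters: status and fetchStatus are independent axes, so a background refetch of an already-successful query is not caught by the pending check alone.

import type { Query } from '@tanstack/query-core'

// Hypothetical helper, for illustration only.
// status describes what is known about the data: 'pending' | 'error' | 'success'
// fetchStatus describes whether the queryFn is running: 'fetching' | 'paused' | 'idle'
// A background refetch of a successful query looks like
//   { status: 'success', fetchStatus: 'fetching' },
// which a `status === 'pending'` check alone does not detect, so hydrating a
// dehydrated promise could previously start a second, redundant fetch.
function shouldSkipHydratedPromiseFetch(query: Query | undefined): boolean {
  return (
    query?.state.status === 'pending' ||
    query?.state.fetchStatus === 'fetching'
  )
}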
package/src/streamedQuery.ts CHANGED
@@ -1,3 +1,4 @@
1
+ import { addToEnd } from './utils'
1
2
  import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'
2
3
 
3
4
  /**
@@ -9,7 +10,11 @@ import type { QueryFunction, QueryFunctionContext, QueryKey } from './types'
9
10
  * @param refetchMode - Defines how re-fetches are handled.
10
11
  * Defaults to `'reset'`, erases all data and puts the query back into `pending` state.
11
12
  * Set to `'append'` to append new data to the existing data.
12
- * Set to `'replace'` to write the data to the cache at the end of the stream.
13
+ * Set to `'replace'` to write all data to the cache once the stream ends.
14
+ * @param maxChunks - The maximum number of chunks to keep in the cache.
15
+ * Defaults to `undefined`, meaning all chunks will be kept.
16
+ * If `undefined` or `0`, the number of chunks is unlimited.
17
+ * If the number of chunks exceeds this number, the oldest chunk will be removed.
13
18
  */
14
19
  export function streamedQuery<
15
20
  TQueryFnData = unknown,
@@ -17,11 +22,13 @@ export function streamedQuery<
17
22
  >({
18
23
  queryFn,
19
24
  refetchMode = 'reset',
25
+ maxChunks,
20
26
  }: {
21
27
  queryFn: (
22
28
  context: QueryFunctionContext<TQueryKey>,
23
29
  ) => AsyncIterable<TQueryFnData> | Promise<AsyncIterable<TQueryFnData>>
24
30
  refetchMode?: 'append' | 'reset' | 'replace'
31
+ maxChunks?: number
25
32
  }): QueryFunction<Array<TQueryFnData>, TQueryKey> {
26
33
  return async (context) => {
27
34
  const query = context.client
@@ -38,7 +45,7 @@ export function streamedQuery<
38
45
  })
39
46
  }
40
47
 
41
- const result: Array<TQueryFnData> = []
48
+ let result: Array<TQueryFnData> = []
42
49
  const stream = await queryFn(context)
43
50
 
44
51
  for await (const chunk of stream) {
@@ -51,11 +58,11 @@ export function streamedQuery<
51
58
  context.client.setQueryData<Array<TQueryFnData>>(
52
59
  context.queryKey,
53
60
  (prev = []) => {
54
- return prev.concat([chunk])
61
+ return addToEnd(prev, chunk, maxChunks)
55
62
  },
56
63
  )
57
64
  }
58
- result.push(chunk)
65
+ result = addToEnd(result, chunk, maxChunks)
59
66
  }
60
67
 
61
68
  // finalize result: replace-refetching needs to write to the cache