houdini-core 2.0.0-go.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +24 -0
- package/postInstall.js +117 -0
- package/runtime/cache.ts +5 -0
- package/runtime/client.ts +181 -0
- package/runtime/config.ts +79 -0
- package/runtime/generated.ts +1 -0
- package/runtime/imports/config.ts +3 -0
- package/runtime/imports/pluginConfig.ts +5 -0
- package/runtime/index.ts +38 -0
- package/runtime/package.json +1 -0
- package/runtime/plugins/cache.ts +178 -0
- package/runtime/plugins/fetch.ts +337 -0
- package/runtime/plugins/fetchParams.ts +36 -0
- package/runtime/plugins/fragment.ts +80 -0
- package/runtime/plugins/index.ts +9 -0
- package/runtime/plugins/injectedPlugins.ts +9 -0
- package/runtime/plugins/mutation.ts +98 -0
- package/runtime/plugins/optimisticKeys.ts +455 -0
- package/runtime/plugins/query.ts +93 -0
- package/runtime/plugins/subscription.ts +153 -0
- package/runtime/plugins/throwOnError.ts +44 -0
- package/runtime/plugins/utils/documentPlugins.ts +54 -0
- package/runtime/plugins/utils/index.ts +1 -0
- package/runtime/public/cache.ts +121 -0
- package/runtime/public/index.ts +1 -0
- package/runtime/public/list.ts +164 -0
- package/runtime/public/record.ts +113 -0
- package/runtime/public/types.ts +166 -0
- package/runtime/server/index.ts +1 -0
- package/shim.cjs +64 -0
package/runtime/plugins/optimisticKeys.ts
@@ -0,0 +1,455 @@
import { computeID, getFieldsForType, keyFieldsForType, marshalSelection } from 'houdini/runtime'
import type { Cache } from 'houdini/runtime/cache'
import type { ClientPlugin } from 'houdini/runtime/documentStore'
import type {
	GraphQLObject,
	NestedList,
	GraphQLValue,
	SubscriptionSelection,
} from 'houdini/runtime/types'
import { ArtifactKind } from 'houdini/runtime/types'

import configFile from '../imports/config'

// This plugin is responsible for coordinating requests that have optimistic keys.
// When a mutation contains optimistically generated keys as inputs, we need to block
// the request pipeline until we have a true value for the key. This means that we need
// a way to keep track of the pending keys and then notify other request chains.
//
// The major constraint here is that a document could be invoked multiple times, each of which
// can put the corresponding chain into a pending state. A document can also contain multiple
// keys in its response so we need to keep track of the query path in our data where we encounter the key.
//
// Therefore, we have 2 different mappings we need to track:
// a mapping from optimistic key to the list of callbacks that need to be notified
// a mapping of invocation id and path to the generated optimistic key
// NOTE: we need 2 different indexes so even though ^ could be merged into a single map.
// since we need to know if an input is a generated key and if a path is a generated key

export type CallbackMap = Record<string | number, Array<(newID: any) => void>>
export type KeyMap = Record<number, Record<string, keyof CallbackMap>>
type OptimisticObjectIDMap = Record<number, Record<string, string>>

const keys: KeyMap = {}
const callbacks: CallbackMap = {}
const objectIDMap: OptimisticObjectIDMap = {}

export const optimisticKeys =
	(
		cache: Cache,
		callbackCache: CallbackMap = callbacks,
		keyCache: KeyMap = keys,
		objectIDs: OptimisticObjectIDMap = objectIDMap,
		invocationCounter: number = 1
	): ClientPlugin =>
	() => {
		return {
			async start(ctx, { next }) {
				// the optimistic response gets passed in the context's stuff bag
				const optimisticResponse = ctx.stuff.optimisticResponse

				const newCtx = { ...ctx }

				// if the request has an optimistic response with optimistic keys embedded inside, we need to
				// add them to the response and register the values in our global state (only on the client)
				if (
					optimisticResponse &&
					ctx.artifact.kind === ArtifactKind.Mutation &&
					ctx.artifact.optimisticKeys
				) {
					newCtx.stuff.mutationID = invocationCounter++

					// add the keys to the response
					addKeysToResponse({
						selection: ctx.artifact.selection,
						response: optimisticResponse,
						callbackStore: callbackCache,
						keyStore: keyCache,
						objectIDs,
						mutationID: newCtx.stuff.mutationID,
					})

					// use the updated optimistic response for the rest of the chain
					newCtx.stuff.optimisticResponse = optimisticResponse
				}

				// make sure we write to the correct layer in the cache
				next(newCtx)
			},
			// if a request has variables that contain an optimistic key we need to block the
			// request before it is sent to the server
			beforeNetwork(ctx, { next }) {
				// if there are no optimistic keys, just move onto the next step
				if (Object.keys(keyCache).length === 0) {
					return next(ctx)
				}

				// look through the outgoing variables for ones that we have flagged as optimistic
				const pendingVariables: Record<string, string | null> = extractInputKeys(
					ctx.variables ?? {},
					callbackCache
				)

				// if there aren't any pending variables in the query, just move along
				if (Object.keys(pendingVariables).length === 0) {
					return next(ctx)
				}

				// we need to register a callback with each pending variable
				for (const key of Object.keys(pendingVariables)) {
					callbackCache[key].push((newID) => {
						pendingVariables[key] = newID

						// if that was the last variable that we needed to wait for, we can move on
						if (Object.values(pendingVariables).every((value) => value !== null)) {
							// add the optimistic keys back into the input variables
							next({
								...ctx,
								variables: replaceKeyWithVariable(
									{ ...ctx.variables },
									pendingVariables as Record<string, string>
								),
							})
						}
					})
				}
			},
			afterNetwork(ctx, { value, resolve }) {
				// if the artifact contained optimistic keys we need to extract them from the response
				// and notify any dependent chains
				if (
					ctx.artifact.kind === ArtifactKind.Mutation &&
					ctx.artifact.optimisticKeys &&
					typeof ctx.stuff.mutationID !== 'undefined'
				) {
					// look for any values in the response that correspond to values in the keyCache
					extractResponseKeys(
						cache,
						value.data ?? {},
						ctx.artifact.selection,
						keyCache,
						ctx.stuff.mutationID,
						{
							onNewKey: (optimisticValue, realValue) => {
								callbackCache[optimisticValue].forEach((cb) => {
									cb(realValue)
								})

								// clean up the caches since we're done with this key
								delete callbackCache[optimisticValue]
							},
							onIDChange: (optimisticValue, realValue) => {
								cache.registerKeyMap(optimisticValue, realValue)
							},
						}
					)
				}

				// we're done
				resolve(ctx)
			},

			// when the mutation ends, we no longer have any dependents that we have to track
			end(ctx, { resolve }) {
				if (typeof ctx.stuff.mutationID !== 'undefined') {
					delete keyCache[ctx.stuff.mutationID]
					delete objectIDs[ctx.stuff.mutationID]
				}

				resolve(ctx)
			},
		}
	}

function addKeysToResponse(args: {
	selection: SubscriptionSelection
	response: GraphQLObject
	callbackStore: CallbackMap
	keyStore: KeyMap
	type?: string
	path?: string
	mutationID: number
	objectIDs: OptimisticObjectIDMap
}): any {
	// we need to walk the selection and inject the optimistic keys into the response
	// collect all of the fields that we need to write
	let targetSelection = getFieldsForType(
		args.selection,
		args.response['__typename'] as string | undefined,
		false
	)
	const newKeys = []

	// data is an object with fields that we need to write to the store
	for (const [field, { type, selection: fieldSelection, optimisticKey }] of Object.entries(
		targetSelection
	)) {
		const value = args.response[field]
		const pathSoFar = `${args.path ?? ''}.${field}`

		// if this field is marked as an optimistic key, add it to the obj
		if (optimisticKey) {
			// figure out the value we should use for the optimistic key
			let keyValue

			// if there is a value already in the response then we should use that
			if (value) {
				// marshal the value into something we can use for an id
				const { marshaled } = marshalSelection({
					data: { marshaled: value },
					selection: {
						fields: {
							value: {
								type,
								keyRaw: 'value',
							},
						},
					},
					config: configFile,
				}) as { marshaled: string }

				// use the marshaled value as the key
				keyValue = marshaled
			}
			// if the field isn't present in the optimistic payload then we need to come up
			// with our own value for the key based on the type
			else {
				keyValue = generateKey(type)
			}

			// we need to populate the various stores that we use to track the keys
			newKeys.push(keyValue)
			args.response[field] = keyValue
			args.callbackStore[keyValue] = []
			args.keyStore[args.mutationID] = {
				[pathSoFar]: keyValue,
			}
		}

		// keep walking down the selection
		if (fieldSelection) {
			if (Array.isArray(value)) {
				for (const [index, item] of flattenList(value).entries()) {
					if (item && typeof item === 'object' && !Array.isArray(item)) {
						addKeysToResponse({
							...args,
							selection: fieldSelection,
							response: item as GraphQLObject,
							type,
							path: `${pathSoFar}[${index}]`,
						})
					}
				}
			} else if (value && typeof value == 'object') {
				addKeysToResponse({
					...args,
					selection: fieldSelection,
					response: value as GraphQLObject,
					type,
					path: pathSoFar,
				})
			}
		}
	}

	// if there were optimistic keys added to the response, we need to
	// track the ID holding the new value
	if (newKeys.length > 0) {
		const objID = `${args.type}:${computeID(configFile, args.type ?? '', args.response)}`
		for (const key of newKeys) {
			args.objectIDs[args.mutationID] = {
				...args.objectIDs[args.mutationID],
				[key]: objID,
			}
		}
	}

	return args.response
}

function extractInputKeys(
	obj: GraphQLObject,
	store: CallbackMap,
	found: Record<string, string | null> = {}
) {
	for (const value of Object.values(obj)) {
		if (typeof value === 'string' && store[value]) {
			found[value] = null
		}

		if (Array.isArray(value)) {
			for (const item of flattenList(value)) {
				if (item && typeof item === 'object') {
					extractInputKeys(item as GraphQLObject, store, found)
				}
			}
		} else if (value && typeof value === 'object') {
			extractInputKeys(value as GraphQLObject, store, found)
		}
	}

	return found
}

function extractResponseKeys(
	cache: Cache,
	response: GraphQLObject,
	selection: SubscriptionSelection,
	keyMap: KeyMap,
	mutationID: number,
	events: {
		onNewKey: (optimisticValue: string | number, realValue: string | number) => void
		onIDChange: (optimisticValue: string, realValue: string) => void
	},
	objectIDs: OptimisticObjectIDMap = objectIDMap,
	path: string = '',
	type: string = ''
) {
	// collect all of the fields that we need to write
	let targetSelection = getFieldsForType(
		selection,
		response['__typename'] as string | undefined,
		false
	)

	let optimisticID: string | null = null

	// data is an object with fields that we need to write to the store
	for (const [field, value] of Object.entries(response)) {
		// if the path corresponds to an optimistic key
		const pathSoFar = `${path ?? ''}.${field}`

		if (typeof value === 'string' && keyMap[mutationID][pathSoFar]) {
			const newKey = keyMap[mutationID][pathSoFar]
			// notify the listeners that the key has changed
			events.onNewKey(newKey, value)

			// grab the optimistic ID referenced by the path
			optimisticID = objectIDs[mutationID][newKey]
		}

		// grab the selection info we care about
		if (!selection || !targetSelection[field]) {
			continue
		}

		// look up the field in our schema
		let { type, selection: fieldSelection } = targetSelection[field]

		// walk down lists in the response
		if (Array.isArray(value)) {
			for (const [index, item] of flattenList(value).entries()) {
				if (item && typeof item === 'object' && fieldSelection) {
					extractResponseKeys(
						cache,
						item as GraphQLObject,
						fieldSelection,
						keyMap,
						mutationID,
						events,
						objectIDs,
						`${pathSoFar}[${index}]`,
						type
					)
				}
			}
		}
		// walk down objects in the response
		else if (value && typeof value === 'object' && fieldSelection) {
			extractResponseKeys(
				cache,
				value as GraphQLObject,
				fieldSelection,
				keyMap,
				mutationID,
				events,
				objectIDs,
				pathSoFar,
				type
			)
		}
	}

	// if we found an optimistic ID in the previous step
	if (optimisticID) {
		// once we're done walking down, we can compute the id
		const id = computeID(configFile, type, response)

		// if the id has changed, we need to tell the cache that the two ids are the same
		events.onIDChange(`${type}:${id}`, optimisticID)

		// we need to write new values for the key fields in the cache
		// that are owned by the old key
		cache.write({
			selection: {
				fields: Object.fromEntries(
					keyFieldsForType(configFile, type).map((key) => [
						key,
						{
							type: 'scalar',
							keyRaw: key,
						},
					])
				),
			},
			parent: optimisticID,
			data: response,
		})
	}
}

function flattenList(source: NestedList<GraphQLValue>): Array<GraphQLValue> {
	const result: Array<GraphQLValue> = []
	const left = [...source]
	while (left.length > 0) {
		const head = left.shift()
		if (Array.isArray(head)) {
			left.push(...head)
		} else {
			result.push(head)
		}
	}

	return result
}

function replaceKeyWithVariable(
	variables: GraphQLObject,
	keys: Record<string, string>
): GraphQLObject {
	for (const [key, value] of Object.entries(variables)) {
		if (typeof value === 'string' && keys[value]) {
			variables[key] = keys[value]
		}

		if (Array.isArray(value)) {
			for (const item of flattenList(value)) {
				if (item && typeof item === 'object') {
					replaceKeyWithVariable(item as GraphQLObject, keys)
				}
			}
		} else if (value && typeof value === 'object') {
			replaceKeyWithVariable(value as GraphQLObject, keys)
		}
	}

	return variables
}

function generateKey(type: string) {
	if (type === 'Int') {
		return new Date().getTime()
	}

	if (type === 'String') {
		return new Date().getTime().toString()
	}

	if (type === 'ID') {
		return new Date().getTime().toString()
	}

	throw new Error(
		`unsupported type for optimistic key: ${type}. Please provide a value in your mutation arguments.`
	)
}
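To make the two indexes described in the plugin's header comment concrete, here is a minimal, hypothetical sketch (not part of the package) of what they could hold after one mutation generates an optimistic key at the path `.addItem.id` and a dependent request registers a callback for it:

// Hypothetical contents of the two indexes for mutationID 1. The key value is a
// timestamp string, as produced by generateKey('ID') above.
const keyCache: Record<number, Record<string, string>> = {
	1: { '.addItem.id': '1700000000000' },
}

// A dependent chain whose variables contain '1700000000000' registers a callback in
// beforeNetwork; afterNetwork later calls it with the server-issued id so the blocked
// request can be released with the real value.
const callbackCache: Record<string, Array<(newID: string) => void>> = {
	'1700000000000': [(newID) => console.log('resume with', newID)],
}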
package/runtime/plugins/query.ts
@@ -0,0 +1,93 @@
import type { RuntimeScalarPayload } from 'houdini'
import type { Cache } from 'houdini/runtime/cache'
import { type SubscriptionSpec, ArtifactKind, DataSource } from 'houdini/runtime/types'

import { documentPlugin } from './utils'

export const query = (cache: Cache) =>
	documentPlugin(ArtifactKind.Query, function () {
		// track the bits of state we need to hold onto
		let subscriptionSpec: SubscriptionSpec | null = null

		// remember the last variables we were called with
		let lastVariables: Record<string, any> | null = null

		// the function to call when a query is sent
		return {
			start(ctx, { next }) {
				const runtimeScalarPayload: RuntimeScalarPayload = {
					session: ctx.session,
				}

				// make sure to include the last variables as well as the new ones
				ctx.variables = {
					...lastVariables,
					// we need to evaluate any runtime scalars but allow the user to overwrite them
					// by explicitly passing variables
					...Object.fromEntries(
						Object.entries(ctx.artifact.input?.runtimeScalars ?? {}).map(
							([field, type]) => {
								const runtimeScalar = ctx.config.runtimeScalars?.[type]
								// make typescript happy
								if (!runtimeScalar) {
									return [field, type]
								}

								// resolve the runtime scalar
								return [field, runtimeScalar.resolve(runtimeScalarPayload)]
							}
						)
					),
					...ctx.variables,
				}
				next(ctx)
			},

			// patch subscriptions on the way out so that we don't get a cache update
			// before the promise resolves
			end(ctx, { resolve, marshalVariables, variablesChanged }) {
				// if the variables have changed we need to setup a new subscription with the cache
				if (variablesChanged(ctx) && !ctx.cacheParams?.disableSubscriptions) {
					// if the variables changed we need to unsubscribe from the old fields and
					// listen to the new ones
					if (subscriptionSpec) {
						cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.() || {})
					}

					// track the new variables
					lastVariables = { ...marshalVariables(ctx) }

					const variables = lastVariables
					// save the new subscription spec
					subscriptionSpec = {
						rootType: ctx.artifact.rootType,
						selection: ctx.artifact.selection,
						variables: () => variables,
						set: (newValue) => {
							resolve(ctx, {
								data: newValue,
								errors: null,
								fetching: false,
								partial: false,
								stale: false,
								source: DataSource.Cache,
								variables: ctx.variables ?? {},
							})
						},
					}

					// make sure we subscribe to the new values
					cache.subscribe(subscriptionSpec, lastVariables)
				}

				// we are done
				resolve(ctx)
			},
			cleanup() {
				if (subscriptionSpec) {
					cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.())
					lastVariables = null
				}
			},
		}
	})
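The `start` hook above resolves runtime scalars by looking up `ctx.config.runtimeScalars[type]` and calling its `resolve` function with the `{ session }` payload. As a rough illustration only (the exact config shape is not shown in this diff), an entry could look like the following sketch:

// Hypothetical runtimeScalars config entry, inferred from the lookup in start():
// each entry exposes resolve(), which receives the RuntimeScalarPayload ({ session }).
// Any query variable declared with the ViewerID scalar would be filled in automatically.
export const config = {
	runtimeScalars: {
		ViewerID: {
			resolve: ({ session }: { session: { viewerID?: string } | null }) =>
				session?.viewerID ?? null,
		},
	},
}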
package/runtime/plugins/subscription.ts
@@ -0,0 +1,153 @@
import { deepEquals } from 'houdini/runtime'
import type { ClientPluginContext } from 'houdini/runtime/documentStore'
import { ArtifactKind, DataSource } from 'houdini/runtime/types'

import { documentPlugin } from './utils'

export function subscription(factory: SubscriptionHandler) {
	return documentPlugin(ArtifactKind.Subscription, () => {
		// the unsubscribe hook for the active subscription
		let clearSubscription: null | (() => void) = null

		// we need to re-run the subscription if the following object has changed.
		// this is only safe because this plugin only operates on the client
		let check: {
			fetchParams: RequestInit
			session: App.Session
			metadata: App.Metadata
		} | null = null

		return {
			start(ctx, { resolve, next, initialValue }) {
				// we can only start a websocket client if we're on the browser
				if (typeof globalThis.window === 'undefined') {
					resolve(ctx, initialValue)
					return
				}

				// its safe to keep going
				next(ctx)
			},
			async network(ctx, { resolve, initialValue, variablesChanged, marshalVariables }) {
				const checkValue = {
					fetchParams: ctx.fetchParams ?? {},
					session: ctx.session ?? {},
					metadata: ctx.metadata ?? {},
				}
				// if the variables havent changed since the last time we ran this,
				// there's nothing to do
				const changed = variablesChanged(ctx)
				const sessionChange = !deepEquals(check, checkValue)
				if (!changed && !sessionChange) {
					resolve(ctx, initialValue)
					return
				}

				// we need to use this as the new check value
				check = checkValue

				// if the session has changed then recreate the client
				if (sessionChange) {
					await loadClient(ctx, factory)
				}

				// if we got this far, we need to clear the subscription before we
				// create a new one
				clearSubscription?.()

				// start listening for the new subscription
				clearSubscription = client.subscribe(
					{
						operationName: ctx.name,
						query: ctx.text,
						variables: marshalVariables(ctx),
					},
					{
						next: ({ data, errors }) => {
							resolve(ctx, {
								data: data ?? null,
								errors: [...(errors ?? [])],
								fetching: false,
								partial: true,
								stale: false,
								source: DataSource.Network,
								variables: ctx.variables ?? {},
							})
						},
						error(data) {
							clearSubscription?.()
							resolve(ctx, {
								partial: true,
								stale: false,
								source: DataSource.Network,
								data: null,
								errors: [data as Error],
								fetching: false,
								variables: ctx.variables ?? {},
							})
						},
						complete() {},
					}
				)
			},
			cleanup() {
				clearSubscription?.()
				// clear the check so we already recreate the connection next time
				check = null
			},
		}
	})
}

export type SubscriptionHandler = (ctx: ClientPluginContext) => SubscriptionClient

export type SubscriptionClient = {
	subscribe: (
		payload: {
			operationName?: string
			query: string
			variables?: Record<string, unknown>
			extensions?: Record<'persistedQuery', string> | Record<string, unknown>
		},
		handlers: {
			next: (payload: { data?: {} | null; errors?: readonly { message: string }[] }) => void
			error: (data: {}) => void
			complete: () => void
		}
	) => () => void
}

// if 2 subscriptions start at the same time we don't want to create
// multiple clients. We'll make a global promise that we will use to
// coordinate across invocations of the plugin. This is only safe to do
// without considering user-sessions on the server because this plugin
// ensures that it only runs on the browser in the start phase
let pendingCreate: Promise<void> | null = null

// the actual client
let client: SubscriptionClient

function loadClient(
	ctx: ClientPluginContext,
	factory: (ctx: ClientPluginContext) => SubscriptionClient
): Promise<void> {
	// if we are currently loading a client, just wait for that
	if (pendingCreate) {
		return pendingCreate
	}

	// we aren't currently loading the client so we're safe to do that
	// and register the effort to coordinate other subscriptions
	pendingCreate = new Promise((resolve) => {
		// update the client reference
		client = factory(ctx)

		// we're done
		resolve()

		// we're done with the create
		pendingCreate = null
	})

	return pendingCreate
}
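`subscription()` above accepts any factory whose return value satisfies `SubscriptionClient`, and that contract matches the client produced by the `graphql-ws` library (`subscribe(payload, sink)` returning an unsubscribe function). The following is a minimal sketch of such a factory; the `graphql-ws` dependency and the endpoint URL are assumptions, not part of this package:

import { createClient } from 'graphql-ws'

// Sketch of a factory that could be passed to subscription(). The URL is a placeholder;
// connectionParams forwards the current session so the server can authenticate the socket.
export const subscriptionHandler = (ctx: { session?: Record<string, unknown> | null }) =>
	createClient({
		url: 'ws://localhost:4000/graphql',
		connectionParams: () => ({ session: ctx.session }),
	})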