@sanity/cli 3.88.1-typegen-experimental.0 → 3.88.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,227 +1,229 @@
- /* eslint-disable max-statements */
- import {stat} from 'node:fs/promises'
  import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'

  import {
- DEFAULT_CONFIG,
  findQueriesInPath,
  getResolver,
  readSchema,
  registerBabel,
+ safeParseQuery,
  TypeGenerator,
  } from '@sanity/codegen'
- import {type SchemaType} from 'groq-js'
+ import createDebug from 'debug'
+ import {typeEvaluate, type TypeNode} from 'groq-js'

- import {
- createReporter,
- type WorkerChannel,
- type WorkerChannelEvent,
- type WorkerChannelStream,
- } from '../util/workerChannel'
-
- const DEFAULT_SCHEMA_PATH = DEFAULT_CONFIG.schemas[0].schemaPath
+ const $info = createDebug('sanity:codegen:generate:info')
+ const $warn = createDebug('sanity:codegen:generate:warn')

  export interface TypegenGenerateTypesWorkerData {
  workDir: string
- schemas: {schemaPath: string; schemaId: string}[]
+ workspaceName?: string
+ schemaPath: string
  searchPath: string | string[]
- overloadClientMethods: boolean
- augmentGroqModule: boolean
- }
-
- interface QueryProgress {
- queriesCount: number
- projectionsCount: number
- filesCount: number
+ overloadClientMethods?: boolean
  }

- /** @internal */
- export type TypegenWorkerChannel = WorkerChannel<{
- loadedSchemas: WorkerChannelEvent
- generatedSchemaDeclarations: WorkerChannelEvent<{
- code: string
- schemaStats: {
- schemaTypesCount: number
- schemaCount: number
+ export type TypegenGenerateTypesWorkerMessage =
+ | {
+ type: 'error'
+ error: Error
+ fatal: boolean
+ query?: string
+ filename?: string
  }
- }>
- fileCount: WorkerChannelEvent<{fileCount: number}>
- generatedQueryResultDeclaration: WorkerChannelStream<
- | {
- type: 'progress'
- progress: QueryProgress
- }
- | {
- type: 'declaration'
- code: string
- progress: QueryProgress
- }
- | {
- type: 'error'
- message: string
- progress: QueryProgress
- }
- >
- generationComplete: WorkerChannelEvent<{
- augmentedQueryResultDeclarations: {code: string}
- queryStats: {
- queriesCount: number
- projectionsCount: number
- totalScannedFilesCount: number
- queryFilesCount: number
- projectionFilesCount: number
- filesWithErrors: number
- errorCount: number
- typeNodesGenerated: number
- unknownTypeNodesGenerated: number
- unknownTypeNodesRatio: number
- emptyUnionTypeNodesGenerated: number
+ | {
+ type: 'types'
+ filename: string
+ types: {
+ queryName: string
+ query: string
+ type: string
+ unknownTypeNodesGenerated: number
+ typeNodesGenerated: number
+ emptyUnionTypeNodesGenerated: number
+ }[]
+ }
+ | {
+ type: 'schema'
+ filename: string
+ schema: string
+ length: number
+ }
+ | {
+ type: 'typemap'
+ typeMap: string
+ }
+ | {
+ type: 'complete'
  }
- }>
- }>

  if (isMainThread || !parentPort) {
  throw new Error('This module must be run as a worker thread')
  }

- const report = createReporter<TypegenWorkerChannel>(parentPort)
  const opts = _workerData as TypegenGenerateTypesWorkerData

+ registerBabel()
+
  async function main() {
- const schemas: {schema: SchemaType; schemaId: string; filename: string}[] = []
-
- for (const {schemaId, schemaPath} of opts.schemas) {
- try {
- const schemaStats = await stat(schemaPath)
- if (!schemaStats.isFile()) {
- throw new Error(
- `Failed to load schema "${schemaId}". Schema path is not a file: ${schemaPath}`,
- )
- }
+ const schema = await readSchema(opts.schemaPath)
+
+ const typeGenerator = new TypeGenerator(schema)
+ const schemaTypes = [typeGenerator.generateSchemaTypes(), TypeGenerator.generateKnownTypes()]
+ .join('\n')
+ .trim()
+ const resolver = getResolver()
+
+ parentPort?.postMessage({
+ type: 'schema',
+ schema: `${schemaTypes.trim()}\n`,
+ filename: 'schema.json',
+ length: schema.length,
+ } satisfies TypegenGenerateTypesWorkerMessage)
+
+ const queries = findQueriesInPath({
+ path: opts.searchPath,
+ resolver,
+ })

- const schema = await readSchema(schemaPath)
- schemas.push({schema, schemaId, filename: schemaPath})
- } catch (err) {
- if (err.code === 'ENOENT') {
- // If the user has not provided a specific schema path (eg we're using the default), give some help
- const hint =
- schemaPath === DEFAULT_SCHEMA_PATH ? ` - did you run "sanity schema extract"?` : ''
- throw new Error(`Schema file not found for schema "${schemaId}": ${schemaPath}${hint}`)
- } else {
- throw err
+ const allQueries = []
+
+ for await (const result of queries) {
+ if (result.type === 'error') {
+ parentPort?.postMessage({
+ type: 'error',
+ error: result.error,
+ fatal: false,
+ filename: result.filename,
+ } satisfies TypegenGenerateTypesWorkerMessage)
+ continue
+ }
+ $info(`Processing ${result.queries.length} queries in "${result.filename}"...`)
+
+ const fileQueryTypes: {
+ queryName: string
+ query: string
+ type: string
+ typeName: string
+ typeNode: TypeNode
+ unknownTypeNodesGenerated: number
+ typeNodesGenerated: number
+ emptyUnionTypeNodesGenerated: number
+ }[] = []
+ for (const {name: queryName, result: query} of result.queries) {
+ try {
+ const ast = safeParseQuery(query)
+ const queryTypes = typeEvaluate(ast, schema)
+
+ const typeName = `${queryName}Result`
+ const type = typeGenerator.generateTypeNodeTypes(typeName, queryTypes)
+
+ const queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes)
+ fileQueryTypes.push({
+ queryName,
+ query,
+ typeName,
+ typeNode: queryTypes,
+ type: `${type.trim()}\n`,
+ unknownTypeNodesGenerated: queryTypeStats.unknownTypes,
+ typeNodesGenerated: queryTypeStats.allTypes,
+ emptyUnionTypeNodesGenerated: queryTypeStats.emptyUnions,
+ })
+ } catch (err) {
+ parentPort?.postMessage({
+ type: 'error',
+ error: new Error(
+ `Error generating types for query "${queryName}" in "${result.filename}": ${err.message}`,
+ {cause: err},
+ ),
+ fatal: false,
+ query,
+ } satisfies TypegenGenerateTypesWorkerMessage)
  }
  }
- }
- report.event.loadedSchemas()
-
- const generator = new TypeGenerator({
- schemas,
- queriesByFile: findQueriesInPath({path: opts.searchPath, resolver: getResolver()}),
- augmentGroqModule: opts.augmentGroqModule,
- overloadClientMethods: opts.overloadClientMethods,
- })

- report.event.generatedSchemaDeclarations({
- code: [
- generator.getKnownTypes().code,
- ...generator.getSchemaTypeDeclarations().map((i) => i.code),
- generator.getAllSanitySchemaTypesDeclaration().code,
- ...generator.getSchemaDeclarations().map((i) => i.code),
- generator.getAugmentedSchemasDeclarations().code,
- ].join('\n'),
- schemaStats: {
- schemaTypesCount: generator.getSchemaTypeDeclarations().length,
- schemaCount: schemas.length,
- },
- })
+ if (fileQueryTypes.length > 0) {
+ $info(`Generated types for ${fileQueryTypes.length} queries in "${result.filename}"\n`)
+ parentPort?.postMessage({
+ type: 'types',
+ types: fileQueryTypes,
+ filename: result.filename,
+ } satisfies TypegenGenerateTypesWorkerMessage)
+ }

- const allFilenames = new Set<string>()
- const errorFilenames = new Set<string>()
- const queryFilenames = new Set<string>()
- const projectionFilenames = new Set<string>()
-
- let errorCount = 0
- let queriesCount = 0
- let projectionsCount = 0
- let typeNodesGenerated = 0
- let unknownTypeNodesGenerated = 0
- let emptyUnionTypeNodesGenerated = 0
-
- const {fileCount} = await generator.getQueryFileCount()
- report.event.fileCount({fileCount})
-
- for await (const {filename, ...result} of generator.getQueryResultDeclarations()) {
- allFilenames.add(filename)
- const progress = {
- queriesCount,
- projectionsCount,
- filesCount: allFilenames.size,
+ if (fileQueryTypes.length > 0) {
+ allQueries.push(...fileQueryTypes)
  }
+ }

- switch (result.type) {
- case 'error': {
- errorCount += 1
- errorFilenames.add(filename)
+ if (opts.overloadClientMethods && allQueries.length > 0) {
+ const typeMap = `${typeGenerator.generateQueryMap(allQueries).trim()}\n`
+ parentPort?.postMessage({
+ type: 'typemap',
+ typeMap,
+ } satisfies TypegenGenerateTypesWorkerMessage)
+ }

- const errorMessage =
- typeof result.error === 'object' && result.error !== null && 'message' in result.error
- ? String(result.error.message)
- : 'Unknown Error'
+ parentPort?.postMessage({
+ type: 'complete',
+ } satisfies TypegenGenerateTypesWorkerMessage)
+ }

- const message = `Error generating types in "${filename}": ${errorMessage}`
- report.stream.generatedQueryResultDeclaration.emit({type: 'error', message, progress})
- continue
+ function walkAndCountQueryTypeNodeStats(typeNode: TypeNode): {
+ allTypes: number
+ unknownTypes: number
+ emptyUnions: number
+ } {
+ switch (typeNode.type) {
+ case 'unknown': {
+ return {allTypes: 1, unknownTypes: 1, emptyUnions: 0}
+ }
+ case 'array': {
+ const acc = walkAndCountQueryTypeNodeStats(typeNode.of)
+ acc.allTypes += 1 // count the array type itself
+ return acc
+ }
+ case 'object': {
+ // if the rest is unknown, we count it as one unknown type
+ if (typeNode.rest && typeNode.rest.type === 'unknown') {
+ return {allTypes: 2, unknownTypes: 1, emptyUnions: 0} // count the object type itself as well
  }

- case 'queries': {
- if (!result.queryResultDeclarations.length) {
- report.stream.generatedQueryResultDeclaration.emit({type: 'progress', progress})
- continue
- }
-
- for (const {code, type, stats} of result.queryResultDeclarations) {
- queriesCount += type === 'query' ? 1 : 0
- projectionsCount += type === 'projection' ? 1 : 0
- typeNodesGenerated += stats.allTypes
- unknownTypeNodesGenerated += stats.unknownTypes
- emptyUnionTypeNodesGenerated += stats.emptyUnions
-
- if (type === 'projection') {
- projectionFilenames.add(filename)
- } else {
- queryFilenames.add(filename)
- }
+ const restStats = typeNode.rest
+ ? walkAndCountQueryTypeNodeStats(typeNode.rest)
+ : {allTypes: 1, unknownTypes: 0, emptyUnions: 0} // count the object type itself

- report.stream.generatedQueryResultDeclaration.emit({type: 'declaration', code, progress})
+ return Object.values(typeNode.attributes).reduce((acc, attribute) => {
+ const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(
+ attribute.value,
+ )
+ return {
+ allTypes: acc.allTypes + allTypes,
+ unknownTypes: acc.unknownTypes + unknownTypes,
+ emptyUnions: acc.emptyUnions + emptyUnions,
  }
- continue
+ }, restStats)
+ }
+ case 'union': {
+ if (typeNode.of.length === 0) {
+ return {allTypes: 1, unknownTypes: 0, emptyUnions: 1}
  }

- default: {
- continue
- }
+ return typeNode.of.reduce(
+ (acc, type) => {
+ const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(type)
+ return {
+ allTypes: acc.allTypes + allTypes,
+ unknownTypes: acc.unknownTypes + unknownTypes,
+ emptyUnions: acc.emptyUnions + emptyUnions,
+ }
+ },
+ {allTypes: 1, unknownTypes: 0, emptyUnions: 0}, // count the union type itself
+ )
+ }
+ default: {
+ return {allTypes: 1, unknownTypes: 0, emptyUnions: 0}
  }
  }
- report.stream.generatedQueryResultDeclaration.end()
-
- report.event.generationComplete({
- augmentedQueryResultDeclarations: await generator.getAugmentedQueryResultsDeclarations(),
- queryStats: {
- errorCount,
- queriesCount,
- projectionsCount,
- typeNodesGenerated,
- unknownTypeNodesGenerated,
- emptyUnionTypeNodesGenerated,
- totalScannedFilesCount: allFilenames.size,
- filesWithErrors: errorFilenames.size,
- queryFilesCount: queryFilenames.size,
- projectionFilesCount: projectionFilenames.size,
- unknownTypeNodesRatio:
- typeNodesGenerated > 0 ? unknownTypeNodesGenerated / typeNodesGenerated : 0,
- },
- })
  }

- registerBabel()
  main()
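Note: in 3.88.1 the worker reports progress with plain parentPort.postMessage calls instead of the worker-channel helper removed below. The following is a minimal sketch of how a parent thread might consume those messages; only the option and message shapes come from the hunk above, while the worker file path, import path, and option values are illustrative assumptions.

// Sketch only: parent-side consumer of the 3.88.1 worker messages.
import {Worker} from 'node:worker_threads'
import type {
  TypegenGenerateTypesWorkerData,
  TypegenGenerateTypesWorkerMessage,
} from './typegenGenerate' // hypothetical import path for the module above

const workerData: TypegenGenerateTypesWorkerData = {
  workDir: process.cwd(),
  schemaPath: './schema.json', // illustrative values
  searchPath: './src/**/*.{ts,tsx}',
  overloadClientMethods: true,
}

// The compiled entry point name is an assumption for illustration.
const worker = new Worker('./typegenGenerate.js', {workerData})

worker.on('message', (msg: TypegenGenerateTypesWorkerMessage) => {
  switch (msg.type) {
    case 'schema':
      // schema type declarations, sent once per run
      break
    case 'types':
      // per-file query result types plus per-query stats
      break
    case 'typemap':
      // only sent when overloadClientMethods is set and queries were found
      break
    case 'error':
      if (msg.fatal) worker.terminate()
      break
    case 'complete':
      worker.terminate()
      break
  }
})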
@@ -1,84 +0,0 @@
- "use strict";
- class MessageQueue {
- resolver = null;
- queue = [];
- ended = !1;
- // Flag to indicate if end() was called
- push(message) {
- this.ended || (this.resolver ? (this.resolver({ value: message, done: !1 }), this.resolver = null) : this.queue.push(message));
- }
- next() {
- return this.queue.length ? Promise.resolve({ value: this.queue.shift(), done: !1 }) : this.ended ? Promise.resolve({ value: void 0, done: !0 }) : new Promise((resolve) => this.resolver = resolve);
- }
- end() {
- this.resolver ? (this.resolver({ value: void 0, done: !0 }), this.resolver = null) : this.ended = !0;
- }
- }
- function isWorkerChannelMessage(message) {
- return typeof message != "object" || !message || !("type" in message) || typeof message.type != "string" ? !1 : ["event", "emission", "end"].includes(message.type);
- }
- function createReceiver(worker) {
- const _events = /* @__PURE__ */ new Map(), _streams = /* @__PURE__ */ new Map(), errors = new MessageQueue(), eventQueue = (name) => {
- const queue = _events.get(name) ?? new MessageQueue();
- return _events.has(name) || _events.set(name, queue), queue;
- }, streamQueue = (name) => {
- const queue = _streams.get(name) ?? new MessageQueue();
- return _streams.has(name) || _streams.set(name, queue), queue;
- }, handleMessage = (message) => {
- isWorkerChannelMessage(message) && (message.type === "event" && eventQueue(message.name).push(message), message.type === "emission" && streamQueue(message.name).push(message), message.type === "end" && streamQueue(message.name).end());
- }, handleError = (error) => {
- errors.push({ type: "error", error });
- };
- return worker.addListener("message", handleMessage), worker.addListener("error", handleError), {
- event: new Proxy({}, {
- get: (target, name) => typeof name != "string" ? target[name] : async () => {
- const { value } = await Promise.race([eventQueue(name).next(), errors.next()]);
- if (value.type === "error") throw value.error;
- return value.payload;
- }
- }),
- stream: new Proxy({}, {
- get: (target, prop) => {
- if (typeof prop != "string") return target[prop];
- const name = prop;
- async function* streamReceiver() {
- for (; ; ) {
- const { value, done } = await Promise.race([streamQueue(name).next(), errors.next()]);
- if (done) return;
- if (value.type === "error") throw value.error;
- yield value.payload;
- }
- }
- return streamReceiver;
- }
- }),
- dispose: () => (worker.removeListener("message", handleMessage), worker.removeListener("error", handleError), worker.terminate())
- };
- }
- function createReporter(parentPort) {
- if (!parentPort)
- throw new Error("parentPart was falsy");
- return {
- event: new Proxy({}, {
- get: (target, name) => typeof name != "string" ? target[name] : (payload) => {
- const message = { type: "event", name, payload };
- parentPort.postMessage(message);
- }
- }),
- stream: new Proxy({}, {
- get: (target, name) => typeof name != "string" ? target[name] : {
- emit: (payload) => {
- const message = { type: "emission", name, payload };
- parentPort.postMessage(message);
- },
- end: () => {
- const message = { type: "end", name };
- parentPort.postMessage(message);
- }
- }
- })
- };
- }
- exports.createReceiver = createReceiver;
- exports.createReporter = createReporter;
- //# sourceMappingURL=workerChannel.js.map
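Note: the deleted workerChannel.js is the compiled half of a typed event/stream channel between a worker thread and its parent. The sketch below condenses how it was used, adapted from the doc comment embedded in the removed source map that follows; the channel and event names (compileStart/compileProgress/compileEnd) are illustrative, and the import only resolves in the experimental build where ../util/workerChannel still exists.

// Sketch only: the reporter/receiver pairing the removed util provided.
import {parentPort, Worker} from 'node:worker_threads'
import {
  createReceiver,
  createReporter,
  type WorkerChannel,
  type WorkerChannelEvent,
  type WorkerChannelStream,
} from '../util/workerChannel' // removed in 3.88.1

type MyWorkerChannel = WorkerChannel<{
  compileStart: WorkerChannelEvent<void>
  compileProgress: WorkerChannelStream<{file: string; progress: number}>
  compileEnd: WorkerChannelEvent<{duration: number}>
}>

// Worker side: report named events and stream emissions to the parent.
function runWorkerSide() {
  const report = createReporter<MyWorkerChannel>(parentPort)
  report.event.compileStart()
  for (const file of ['a.js', 'b.js']) {
    report.stream.compileProgress.emit({file, progress: 100})
  }
  report.stream.compileProgress.end()
  report.event.compileEnd({duration: 300})
}

// Parent side: await the same events and iterate the same streams, so the
// parent's control flow mirrors the worker's 1-to-1.
async function monitor(worker: Worker) {
  const receiver = createReceiver<MyWorkerChannel>(worker)
  await receiver.event.compileStart()
  for await (const {file, progress} of receiver.stream.compileProgress()) {
    console.log(`${file}: ${progress}%`)
  }
  const {duration} = await receiver.event.compileEnd()
  console.log(`compiled in ${duration}ms`)
  await receiver.dispose()
}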
@@ -1 +0,0 @@
- [workerChannel.js.map removed: a single-line source map whose sourcesContent embeds the original ../../src/util/workerChannel.ts, including the MessageQueue class, the createReceiver/createReporter implementations, and a usage example in the WorkerChannel doc comment.]
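Note: the experimental channel aggregated run-wide statistics (including unknownTypeNodesRatio) inside the worker, while 3.88.1 only emits per-query counts in each 'types' message. A small sketch of recomputing that summary on the parent side; the helper name and stats type are hypothetical, and the ratio formula mirrors the removed generationComplete payload.

// Sketch only: aggregating per-query stats from 'types' messages.
type QueryTypeStats = {
  typeNodesGenerated: number
  unknownTypeNodesGenerated: number
  emptyUnionTypeNodesGenerated: number
}

function summarize(allTypes: QueryTypeStats[]) {
  const typeNodesGenerated = allTypes.reduce((n, t) => n + t.typeNodesGenerated, 0)
  const unknownTypeNodesGenerated = allTypes.reduce((n, t) => n + t.unknownTypeNodesGenerated, 0)
  const emptyUnionTypeNodesGenerated = allTypes.reduce(
    (n, t) => n + t.emptyUnionTypeNodesGenerated,
    0,
  )
  return {
    queriesCount: allTypes.length,
    typeNodesGenerated,
    unknownTypeNodesGenerated,
    emptyUnionTypeNodesGenerated,
    // same ratio the experimental worker reported, guarded against division by zero
    unknownTypeNodesRatio:
      typeNodesGenerated > 0 ? unknownTypeNodesGenerated / typeNodesGenerated : 0,
  }
}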