@databricks/appkit 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +89 -12
- package/CLAUDE.md +89 -12
- package/NOTICE.md +4 -0
- package/README.md +21 -15
- package/bin/appkit-lint.js +129 -0
- package/dist/analytics/analytics.d.ts +33 -8
- package/dist/analytics/analytics.d.ts.map +1 -1
- package/dist/analytics/analytics.js +67 -27
- package/dist/analytics/analytics.js.map +1 -1
- package/dist/analytics/defaults.js.map +1 -1
- package/dist/analytics/query.js +12 -6
- package/dist/analytics/query.js.map +1 -1
- package/dist/app/index.d.ts.map +1 -1
- package/dist/app/index.js +7 -5
- package/dist/app/index.js.map +1 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/defaults.js.map +1 -1
- package/dist/cache/index.d.ts +1 -0
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/index.js +25 -5
- package/dist/cache/index.js.map +1 -1
- package/dist/cache/storage/memory.js.map +1 -1
- package/dist/cache/storage/persistent.js +12 -6
- package/dist/cache/storage/persistent.js.map +1 -1
- package/dist/connectors/lakebase/client.js +31 -21
- package/dist/connectors/lakebase/client.js.map +1 -1
- package/dist/connectors/lakebase/defaults.js.map +1 -1
- package/dist/connectors/sql-warehouse/client.js +68 -28
- package/dist/connectors/sql-warehouse/client.js.map +1 -1
- package/dist/connectors/sql-warehouse/defaults.js.map +1 -1
- package/dist/context/execution-context.js +75 -0
- package/dist/context/execution-context.js.map +1 -0
- package/dist/context/index.js +27 -0
- package/dist/context/index.js.map +1 -0
- package/dist/context/service-context.js +154 -0
- package/dist/context/service-context.js.map +1 -0
- package/dist/context/user-context.js +15 -0
- package/dist/context/user-context.js.map +1 -0
- package/dist/core/appkit.d.ts +3 -0
- package/dist/core/appkit.d.ts.map +1 -1
- package/dist/core/appkit.js +7 -0
- package/dist/core/appkit.js.map +1 -1
- package/dist/errors/authentication.d.ts +38 -0
- package/dist/errors/authentication.d.ts.map +1 -0
- package/dist/errors/authentication.js +48 -0
- package/dist/errors/authentication.js.map +1 -0
- package/dist/errors/base.d.ts +58 -0
- package/dist/errors/base.d.ts.map +1 -0
- package/dist/errors/base.js +70 -0
- package/dist/errors/base.js.map +1 -0
- package/dist/errors/configuration.d.ts +38 -0
- package/dist/errors/configuration.d.ts.map +1 -0
- package/dist/errors/configuration.js +45 -0
- package/dist/errors/configuration.js.map +1 -0
- package/dist/errors/connection.d.ts +42 -0
- package/dist/errors/connection.d.ts.map +1 -0
- package/dist/errors/connection.js +54 -0
- package/dist/errors/connection.js.map +1 -0
- package/dist/errors/execution.d.ts +42 -0
- package/dist/errors/execution.d.ts.map +1 -0
- package/dist/errors/execution.js +51 -0
- package/dist/errors/execution.js.map +1 -0
- package/dist/errors/index.js +28 -0
- package/dist/errors/index.js.map +1 -0
- package/dist/errors/initialization.d.ts +34 -0
- package/dist/errors/initialization.d.ts.map +1 -0
- package/dist/errors/initialization.js +42 -0
- package/dist/errors/initialization.js.map +1 -0
- package/dist/errors/server.d.ts +38 -0
- package/dist/errors/server.d.ts.map +1 -0
- package/dist/errors/server.js +45 -0
- package/dist/errors/server.js.map +1 -0
- package/dist/errors/tunnel.d.ts +38 -0
- package/dist/errors/tunnel.d.ts.map +1 -0
- package/dist/errors/tunnel.js +51 -0
- package/dist/errors/tunnel.js.map +1 -0
- package/dist/errors/validation.d.ts +36 -0
- package/dist/errors/validation.d.ts.map +1 -0
- package/dist/errors/validation.js +45 -0
- package/dist/errors/validation.js.map +1 -0
- package/dist/index.d.ts +12 -4
- package/dist/index.js +12 -4
- package/dist/index.js.map +1 -1
- package/dist/logging/logger.js +179 -0
- package/dist/logging/logger.js.map +1 -0
- package/dist/logging/sampling.js +56 -0
- package/dist/logging/sampling.js.map +1 -0
- package/dist/logging/wide-event-emitter.js +108 -0
- package/dist/logging/wide-event-emitter.js.map +1 -0
- package/dist/logging/wide-event.js +167 -0
- package/dist/logging/wide-event.js.map +1 -0
- package/dist/plugin/dev-reader.d.ts.map +1 -1
- package/dist/plugin/dev-reader.js +8 -3
- package/dist/plugin/dev-reader.js.map +1 -1
- package/dist/plugin/interceptors/cache.js.map +1 -1
- package/dist/plugin/interceptors/retry.js +10 -2
- package/dist/plugin/interceptors/retry.js.map +1 -1
- package/dist/plugin/interceptors/telemetry.js +24 -9
- package/dist/plugin/interceptors/telemetry.js.map +1 -1
- package/dist/plugin/interceptors/timeout.js +4 -0
- package/dist/plugin/interceptors/timeout.js.map +1 -1
- package/dist/plugin/plugin.d.ts +38 -4
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +86 -5
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/plugin/to-plugin.d.ts +4 -0
- package/dist/plugin/to-plugin.d.ts.map +1 -1
- package/dist/plugin/to-plugin.js +3 -0
- package/dist/plugin/to-plugin.js.map +1 -1
- package/dist/server/index.d.ts +3 -0
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +25 -21
- package/dist/server/index.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-controller.js +4 -2
- package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-manager.js +10 -8
- package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
- package/dist/server/utils.js.map +1 -1
- package/dist/server/vite-dev-server.js +8 -5
- package/dist/server/vite-dev-server.js.map +1 -1
- package/dist/shared/src/sql/helpers.js.map +1 -1
- package/dist/stream/arrow-stream-processor.js +13 -6
- package/dist/stream/arrow-stream-processor.js.map +1 -1
- package/dist/stream/buffers.js +5 -1
- package/dist/stream/buffers.js.map +1 -1
- package/dist/stream/sse-writer.js.map +1 -1
- package/dist/stream/stream-manager.d.ts.map +1 -1
- package/dist/stream/stream-manager.js +47 -36
- package/dist/stream/stream-manager.js.map +1 -1
- package/dist/stream/stream-registry.js.map +1 -1
- package/dist/stream/types.js.map +1 -1
- package/dist/telemetry/index.d.ts +2 -2
- package/dist/telemetry/index.js +2 -2
- package/dist/telemetry/instrumentations.js +14 -10
- package/dist/telemetry/instrumentations.js.map +1 -1
- package/dist/telemetry/telemetry-manager.js +8 -6
- package/dist/telemetry/telemetry-manager.js.map +1 -1
- package/dist/telemetry/trace-sampler.js +33 -0
- package/dist/telemetry/trace-sampler.js.map +1 -0
- package/dist/type-generator/index.js +4 -2
- package/dist/type-generator/index.js.map +1 -1
- package/dist/type-generator/query-registry.js +4 -2
- package/dist/type-generator/query-registry.js.map +1 -1
- package/dist/type-generator/types.js.map +1 -1
- package/dist/type-generator/vite-plugin.d.ts.map +1 -1
- package/dist/type-generator/vite-plugin.js +5 -3
- package/dist/type-generator/vite-plugin.js.map +1 -1
- package/dist/utils/env-validator.js +5 -5
- package/dist/utils/env-validator.js.map +1 -1
- package/dist/utils/merge.js +1 -5
- package/dist/utils/merge.js.map +1 -1
- package/dist/utils/path-exclusions.js +66 -0
- package/dist/utils/path-exclusions.js.map +1 -0
- package/dist/utils/vite-config-merge.js +1 -5
- package/dist/utils/vite-config-merge.js.map +1 -1
- package/llms.txt +89 -12
- package/package.json +6 -1
- package/dist/utils/databricks-client-middleware.d.ts +0 -17
- package/dist/utils/databricks-client-middleware.d.ts.map +0 -1
- package/dist/utils/databricks-client-middleware.js +0 -117
- package/dist/utils/databricks-client-middleware.js.map +0 -1
- package/dist/utils/index.js +0 -26
- package/dist/utils/index.js.map +0 -1

package/dist/stream/stream-manager.js.map
CHANGED
@@ -1 +1 @@
-(single-line source map; its embedded sourcesContent carries the previous stream-manager.ts)
+(regenerated single-line source map; its embedded sourcesContent shows stream-manager.ts now importing context from "@opentelemetry/api", returning early from stream() and _createNewStream() when the response is already closed (res.writableEnded || res.destroyed), capturing context.active() as traceContext on each new StreamEntry, and running _processGeneratorInBackground() inside context.with(streamEntry.traceContext, ...))
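
The substantive change buried in that map is the trace-context handoff: the manager captures the active OpenTelemetry context when a stream is created and re-enters it while draining the generator in the background, so spans emitted there stay parented to the originating request instead of becoming orphan roots. A minimal sketch of the pattern, using only @opentelemetry/api calls (all other names are illustrative):

import { context, trace, type Context } from "@opentelemetry/api";

const tracer = trace.getTracer("example");

function startBackgroundWork(task: () => Promise<void>): Promise<void> {
  // capture the caller's context (e.g. the HTTP request span) at creation time
  const captured: Context = context.active();
  // re-enter it when the work actually runs, outside the request lifecycle
  return context.with(captured, () =>
    tracer.startActiveSpan("background-step", async (span) => {
      try {
        await task(); // spans created in here parent to the captured request span
      } finally {
        span.end();
      }
    }),
  );
}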

package/dist/stream/stream-registry.js.map
CHANGED
@@ -1 +1 @@
-(previous single-line source map)
+(regenerated single-line source map; its embedded stream-registry.ts shows StreamRegistry keeping a RingBuffer of StreamEntry keyed by streamId, evicting the least recently accessed stream once capacity is reached, and broadcasting a STREAM_EVICTED error to the evicted stream's clients before aborting it)
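
The eviction scan, excerpted from the sourcesContent embedded in the new map (the full source also writes a STREAM_EVICTED error event to each open client before aborting):

// from stream-registry.ts: find the least recently accessed stream,
// excluding the one currently being added, then abort and remove it
private _evictOldestStream(excludeStreamId: string): void {
  const allStreams = this.streams.getAll();
  let oldestStream: StreamEntry | null = null;
  let oldestAccess = Infinity;

  for (const stream of allStreams) {
    if (
      stream.streamId !== excludeStreamId &&
      stream.lastAccess < oldestAccess
    ) {
      oldestStream = stream;
      oldestAccess = stream.lastAccess;
    }
  }

  if (oldestStream) {
    oldestStream.abortController.abort("Stream evicted");
    this.streams.remove(oldestStream.streamId);
  }
}
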
package/dist/stream/types.js.map
CHANGED
@@ -1 +1 @@
-(single-line source map; its embedded types.ts declares StreamEntry without a trace context field)
+(regenerated single-line source map; its embedded types.ts adds import type { Context } from "@opentelemetry/api" and a required traceContext: Context field on StreamEntry, matching the stream-manager change above)

package/dist/telemetry/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
 import { ITelemetry, InstrumentConfig, TelemetryConfig } from "./types.js";
 import { Counter, Histogram, Span as Span$1, SpanStatusCode } from "@opentelemetry/api";
-import { SeverityNumber } from "@opentelemetry/api-logs";
-export { type Counter, type Histogram, SeverityNumber, type Span$1 as Span, SpanStatusCode };
+import { SeverityNumber as SeverityNumber$1 } from "@opentelemetry/api-logs";
+export { type Counter, type Histogram, SeverityNumber$1 as SeverityNumber, type Span$1 as Span, SpanStatusCode };

package/dist/telemetry/index.js
CHANGED
@@ -3,6 +3,6 @@ import { instrumentations } from "./instrumentations.js";
 import { TelemetryProvider } from "./telemetry-provider.js";
 import { TelemetryManager } from "./telemetry-manager.js";
 import { SpanKind, SpanStatusCode } from "@opentelemetry/api";
-import { SeverityNumber } from "@opentelemetry/api-logs";
+import { SeverityNumber as SeverityNumber$1 } from "@opentelemetry/api-logs";
 
-export { SeverityNumber, SpanKind, SpanStatusCode };
+export { SeverityNumber$1 as SeverityNumber, SpanKind, SpanStatusCode };
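
The SeverityNumber$1 suffix is an internal bundler rename to dodge an identifier collision; the public export name is unchanged, so consumers are unaffected. A quick illustration, assuming these re-exports are reachable from the package root (the exact import path is an assumption):

// the $1 alias never leaks: the export is still named SeverityNumber
import { SeverityNumber, SpanStatusCode } from "@databricks/appkit";

console.log(SeverityNumber.INFO);  // 9, per @opentelemetry/api-logs
console.log(SpanStatusCode.ERROR); // 2, per @opentelemetry/api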

package/dist/telemetry/instrumentations.js
CHANGED
@@ -1,3 +1,4 @@
+import { shouldIgnoreRequest } from "../utils/path-exclusions.js";
 import { ExpressInstrumentation } from "@opentelemetry/instrumentation-express";
 import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
 
@@ -10,17 +11,20 @@ import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
  * the recommended approach is to register them once in a corresponding plugin constructor.
  */
 const instrumentations = {
-	http: new HttpInstrumentation({
-
-
-
-		if (
-
-
-
+	http: new HttpInstrumentation({
+		ignoreIncomingRequestHook: shouldIgnoreRequest,
+		applyCustomAttributesOnSpan(span, request) {
+			let spanName = null;
+			if (request.route) {
+				const fullPath = (request.baseUrl || "") + (request.url?.split("?")[0] || "");
+				if (fullPath) spanName = `${request.method} ${fullPath}`;
+			} else if (request.url) {
+				const path = request.url.split("?")[0];
+				spanName = `${request.method} ${path}`;
+			}
+			if (spanName) span.updateName(spanName);
 	}
-
-	} }),
+	}),
 	express: new ExpressInstrumentation({ requestHook: (span, info) => {
 		const req = info.request;
 		if (info.layerType === "request_handler" && req.route) {
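
Both hooks above and the new sampler below lean on the new utils/path-exclusions module (+66 lines, not shown in this diff). A hypothetical sketch of its shape — the exclusion patterns here are invented and the real list may differ:

import type { IncomingMessage } from "node:http";

// assumed patterns: static assets, source maps, favicons, health checks
const EXCLUDED = [/^\/assets\//, /\.(js|css|map|ico|png|svg|woff2?)$/, /^\/healthz?$/];

export function shouldExcludePath(path: string): boolean {
  return EXCLUDED.some((re) => re.test(path));
}

// adapts the path predicate to HttpInstrumentation's ignoreIncomingRequestHook,
// which receives the raw IncomingMessage
export function shouldIgnoreRequest(request: IncomingMessage): boolean {
  return shouldExcludePath((request.url ?? "").split("?")[0]);
}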

package/dist/telemetry/instrumentations.js.map
CHANGED
@@ -1 +1 @@
-(previous single-line source map)
+(regenerated single-line source map; its embedded instrumentations.ts matches the change above — shouldIgnoreRequest wired in as ignoreIncomingRequestHook, plus span renaming to "METHOD /full/path" in both the HTTP and Express hooks)

package/dist/telemetry/telemetry-manager.js
CHANGED
@@ -1,4 +1,6 @@
+import { createLogger } from "../logging/logger.js";
 import { TelemetryProvider } from "./telemetry-provider.js";
+import { AppKitSampler } from "./trace-sampler.js";
 import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
 import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-proto";
 import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
@@ -7,11 +9,11 @@ import { registerInstrumentations } from "@opentelemetry/instrumentation";
 import { detectResources, envDetector, hostDetector, processDetector, resourceFromAttributes } from "@opentelemetry/resources";
 import { BatchLogRecordProcessor } from "@opentelemetry/sdk-logs";
 import { PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
-import { AlwaysOnSampler } from "@opentelemetry/sdk-trace-base";
-import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
 import { NodeSDK } from "@opentelemetry/sdk-node";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
 
 //#region src/telemetry/telemetry-manager.ts
+const logger = createLogger("telemetry");
 var TelemetryManager = class TelemetryManager {
 	static {
 		this.DEFAULT_EXPORT_INTERVAL_MS = 1e4;
@@ -44,7 +46,7 @@ var TelemetryManager = class TelemetryManager {
 		this.sdk = new NodeSDK({
 			resource: this.createResource(config),
 			autoDetectResources: false,
-			sampler: new AlwaysOnSampler(),
+			sampler: new AppKitSampler(),
 			traceExporter: new OTLPTraceExporter({ headers: config.headers }),
 			metricReaders: [new PeriodicExportingMetricReader({
 				exporter: new OTLPMetricExporter({ headers: config.headers }),
@@ -55,9 +57,9 @@ var TelemetryManager = class TelemetryManager {
 			});
 			this.sdk.start();
 			this.registerShutdown();
-
+			logger.debug("Initialized successfully");
 		} catch (error) {
-
+			logger.error("Failed to initialize: %O", error);
 		}
 	}
 	/**
@@ -103,7 +105,7 @@ var TelemetryManager = class TelemetryManager {
 			await this.sdk.shutdown();
 			this.sdk = void 0;
 		} catch (error) {
-
+			logger.error("Error shutting down: %O", error);
 		}
 	}
 };
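
createLogger comes from the new logging/logger module (+179 lines, not shown in this diff). A minimal sketch of the shape the calls above imply — a namespaced logger with util.format-style %O placeholders; the namespace prefix and the DEBUG gating are assumptions:

import { format } from "node:util";

export function createLogger(namespace: string) {
  const debugEnabled = (process.env.DEBUG ?? "").includes(namespace);
  return {
    debug(msg: string, ...args: unknown[]) {
      if (debugEnabled) console.debug(`[appkit:${namespace}]`, format(msg, ...args));
    },
    error(msg: string, ...args: unknown[]) {
      console.error(`[appkit:${namespace}]`, format(msg, ...args));
    },
  };
}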
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"telemetry-manager.js","names":[],"sources":["../../src/telemetry/telemetry-manager.ts"],"sourcesContent":["import
|
|
1
|
+
{"version":3,"file":"telemetry-manager.js","names":[],"sources":["../../src/telemetry/telemetry-manager.ts"],"sourcesContent":["import { getNodeAutoInstrumentations } from \"@opentelemetry/auto-instrumentations-node\";\nimport { OTLPLogExporter } from \"@opentelemetry/exporter-logs-otlp-proto\";\nimport { OTLPMetricExporter } from \"@opentelemetry/exporter-metrics-otlp-proto\";\nimport { OTLPTraceExporter } from \"@opentelemetry/exporter-trace-otlp-proto\";\nimport {\n type Instrumentation,\n registerInstrumentations as otelRegisterInstrumentations,\n} from \"@opentelemetry/instrumentation\";\nimport {\n detectResources,\n envDetector,\n hostDetector,\n processDetector,\n type Resource,\n resourceFromAttributes,\n} from \"@opentelemetry/resources\";\nimport { BatchLogRecordProcessor } from \"@opentelemetry/sdk-logs\";\nimport { PeriodicExportingMetricReader } from \"@opentelemetry/sdk-metrics\";\nimport { NodeSDK } from \"@opentelemetry/sdk-node\";\nimport {\n ATTR_SERVICE_NAME,\n ATTR_SERVICE_VERSION,\n} from \"@opentelemetry/semantic-conventions\";\nimport type { TelemetryOptions } from \"shared\";\nimport { createLogger } from \"../logging/logger\";\nimport { TelemetryProvider } from \"./telemetry-provider\";\nimport { AppKitSampler } from \"./trace-sampler\";\nimport type { TelemetryConfig } from \"./types\";\n\nconst logger = createLogger(\"telemetry\");\n\nexport class TelemetryManager {\n private static readonly DEFAULT_EXPORT_INTERVAL_MS = 10000;\n private static readonly DEFAULT_FALLBACK_APP_NAME = \"databricks-app\";\n\n private static instance?: TelemetryManager;\n private sdk?: NodeSDK;\n\n /**\n * Create a scoped telemetry provider for a specific plugin.\n * The plugin's name will be used as the default tracer/meter name.\n * @param pluginName - The name of the plugin to create scoped telemetry for\n * @param telemetryConfig - The telemetry configuration for the plugin\n * @returns A scoped telemetry instance for the plugin\n */\n static getProvider(\n pluginName: string,\n telemetryConfig?: TelemetryOptions,\n ): TelemetryProvider {\n const globalManager = TelemetryManager.getInstance();\n return new TelemetryProvider(pluginName, globalManager, telemetryConfig);\n }\n\n private constructor() {}\n\n static getInstance(): TelemetryManager {\n if (!TelemetryManager.instance) {\n TelemetryManager.instance = new TelemetryManager();\n }\n return TelemetryManager.instance;\n }\n\n static initialize(config: Partial<TelemetryConfig> = {}): void {\n const instance = TelemetryManager.getInstance();\n instance._initialize(config);\n }\n\n private _initialize(config: Partial<TelemetryConfig>): void {\n if (this.sdk) return;\n\n if (!process.env.OTEL_EXPORTER_OTLP_ENDPOINT) {\n return;\n }\n\n try {\n this.sdk = new NodeSDK({\n resource: this.createResource(config),\n autoDetectResources: false,\n sampler: new AppKitSampler(),\n traceExporter: new OTLPTraceExporter({ headers: config.headers }),\n metricReaders: [\n new PeriodicExportingMetricReader({\n exporter: new OTLPMetricExporter({ headers: config.headers }),\n exportIntervalMillis:\n config.exportIntervalMs ||\n TelemetryManager.DEFAULT_EXPORT_INTERVAL_MS,\n }),\n ],\n logRecordProcessors: [\n new BatchLogRecordProcessor(\n new OTLPLogExporter({ headers: config.headers }),\n ),\n ],\n instrumentations: this.getDefaultInstrumentations(),\n });\n\n this.sdk.start();\n this.registerShutdown();\n logger.debug(\"Initialized successfully\");\n } catch (error) {\n logger.error(\"Failed to initialize: %O\", error);\n }\n }\n\n /**\n * 
Register OpenTelemetry instrumentations.\n * Can be called at any time, but recommended to call in plugin constructor.\n * @param instrumentations - Array of OpenTelemetry instrumentations to register\n */\n registerInstrumentations(instrumentations: Instrumentation[]): void {\n otelRegisterInstrumentations({\n // global providers set by NodeSDK.start()\n instrumentations,\n });\n }\n\n private createResource(config: Partial<TelemetryConfig>): Resource {\n const serviceName =\n config.serviceName ||\n process.env.OTEL_SERVICE_NAME ||\n process.env.DATABRICKS_APP_NAME ||\n TelemetryManager.DEFAULT_FALLBACK_APP_NAME;\n const initialResource = resourceFromAttributes({\n [ATTR_SERVICE_NAME]: serviceName,\n [ATTR_SERVICE_VERSION]: config.serviceVersion ?? undefined,\n });\n const detectedResource = detectResources({\n detectors: [envDetector, hostDetector, processDetector],\n });\n return initialResource.merge(detectedResource);\n }\n\n private getDefaultInstrumentations(): Instrumentation[] {\n return [\n ...getNodeAutoInstrumentations({\n //\n // enabled as a part of the server plugin\n //\n \"@opentelemetry/instrumentation-http\": {\n enabled: false,\n },\n \"@opentelemetry/instrumentation-express\": {\n enabled: false,\n },\n //\n // reduce noise\n //\n \"@opentelemetry/instrumentation-fs\": {\n enabled: false,\n },\n \"@opentelemetry/instrumentation-dns\": {\n enabled: false,\n },\n \"@opentelemetry/instrumentation-net\": {\n enabled: false,\n },\n }),\n ];\n }\n\n private registerShutdown() {\n const shutdownFn = async () => {\n await TelemetryManager.getInstance().shutdown();\n };\n process.once(\"SIGTERM\", shutdownFn);\n process.once(\"SIGINT\", shutdownFn);\n }\n\n private async shutdown(): Promise<void> {\n if (!this.sdk) {\n return;\n }\n\n try {\n await this.sdk.shutdown();\n this.sdk = undefined;\n } catch (error) {\n logger.error(\"Error shutting down: %O\", error);\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;AA6BA,MAAM,SAAS,aAAa,YAAY;AAExC,IAAa,mBAAb,MAAa,iBAAiB;;oCACyB;;;mCACD;;;;;;;;;CAYpD,OAAO,YACL,YACA,iBACmB;AAEnB,SAAO,IAAI,kBAAkB,YADP,iBAAiB,aAAa,EACI,gBAAgB;;CAG1E,AAAQ,cAAc;CAEtB,OAAO,cAAgC;AACrC,MAAI,CAAC,iBAAiB,SACpB,kBAAiB,WAAW,IAAI,kBAAkB;AAEpD,SAAO,iBAAiB;;CAG1B,OAAO,WAAW,SAAmC,EAAE,EAAQ;AAE7D,EADiB,iBAAiB,aAAa,CACtC,YAAY,OAAO;;CAG9B,AAAQ,YAAY,QAAwC;AAC1D,MAAI,KAAK,IAAK;AAEd,MAAI,CAAC,QAAQ,IAAI,4BACf;AAGF,MAAI;AACF,QAAK,MAAM,IAAI,QAAQ;IACrB,UAAU,KAAK,eAAe,OAAO;IACrC,qBAAqB;IACrB,SAAS,IAAI,eAAe;IAC5B,eAAe,IAAI,kBAAkB,EAAE,SAAS,OAAO,SAAS,CAAC;IACjE,eAAe,CACb,IAAI,8BAA8B;KAChC,UAAU,IAAI,mBAAmB,EAAE,SAAS,OAAO,SAAS,CAAC;KAC7D,sBACE,OAAO,oBACP,iBAAiB;KACpB,CAAC,CACH;IACD,qBAAqB,CACnB,IAAI,wBACF,IAAI,gBAAgB,EAAE,SAAS,OAAO,SAAS,CAAC,CACjD,CACF;IACD,kBAAkB,KAAK,4BAA4B;IACpD,CAAC;AAEF,QAAK,IAAI,OAAO;AAChB,QAAK,kBAAkB;AACvB,UAAO,MAAM,2BAA2B;WACjC,OAAO;AACd,UAAO,MAAM,4BAA4B,MAAM;;;;;;;;CASnD,yBAAyB,kBAA2C;AAClE,2BAA6B,EAE3B,kBACD,CAAC;;CAGJ,AAAQ,eAAe,QAA4C;EACjE,MAAM,cACJ,OAAO,eACP,QAAQ,IAAI,qBACZ,QAAQ,IAAI,uBACZ,iBAAiB;EACnB,MAAM,kBAAkB,uBAAuB;IAC5C,oBAAoB;IACpB,uBAAuB,OAAO,kBAAkB;GAClD,CAAC;EACF,MAAM,mBAAmB,gBAAgB,EACvC,WAAW;GAAC;GAAa;GAAc;GAAgB,EACxD,CAAC;AACF,SAAO,gBAAgB,MAAM,iBAAiB;;CAGhD,AAAQ,6BAAgD;AACtD,SAAO,CACL,GAAG,4BAA4B;GAI7B,uCAAuC,EACrC,SAAS,OACV;GACD,0CAA0C,EACxC,SAAS,OACV;GAID,qCAAqC,EACnC,SAAS,OACV;GACD,sCAAsC,EACpC,SAAS,OACV;GACD,sCAAsC,EACpC,SAAS,OACV;GACF,CAAC,CACH;;CAGH,AAAQ,mBAAmB;EACzB,MAAM,aAAa,YAAY;AAC7B,SAAM,iBAAiB,aAAa,CAAC,UAAU;;AAEjD,UAAQ,KAAK,WAAW,WAAW;AACnC,UAAQ,KAAK,UAAU,WAAW;;CAGpC,MAAc,WAA0B;AACtC,MAAI,CAAC,KAAK,IACR;AAGF,MAAI;AACF,SAAM,KAAK,IAAI,UAAU;AACzB,QAAK,MAAM;WACJ,OAAO;AACd,UAAO,MAAM,2BAA2B,MAAM"}
package/dist/telemetry/trace-sampler.js
ADDED
@@ -0,0 +1,33 @@
+import { shouldExcludePath } from "../utils/path-exclusions.js";
+import { SamplingDecision } from "@opentelemetry/sdk-trace-base";
+
+//#region src/telemetry/trace-sampler.ts
+/**
+ * Custom sampler that filters out asset requests and other noise.
+ *
+ * This acts as a secondary filter after HttpInstrumentation.ignoreIncomingRequestHook.
+ * It catches any spans that slip through the primary filter.
+ */
+var AppKitSampler = class {
+shouldSample(_context, _traceId, spanName, _spanKind, attributes, _links) {
+const httpTarget = attributes["http.target"];
+const httpRoute = attributes["http.route"];
+const httpUrl = attributes["http.url"];
+let path = httpTarget || httpRoute;
+if (!path && httpUrl) try {
+path = new URL(httpUrl).pathname;
+} catch {
+path = httpUrl;
+}
+if (!path) path = spanName;
+if (shouldExcludePath(path)) return { decision: SamplingDecision.NOT_RECORD };
+return { decision: SamplingDecision.RECORD_AND_SAMPLED };
+}
+toString() {
+return "AppKitSampler";
+}
+};
+
+//#endregion
+export { AppKitSampler };
+//# sourceMappingURL=trace-sampler.js.map
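The sampler's filtering hinges on how it recovers a request path before handing it to `shouldExcludePath`. A standalone sketch of that resolution order; the function name `resolvePath` is mine, not part of the package:

```ts
import type { Attributes } from "@opentelemetry/api";

// Mirrors AppKitSampler's precedence: http.target, then http.route,
// then the pathname of http.url, then the span name as a last resort.
function resolvePath(attributes: Attributes, spanName: string): string {
  const httpTarget = attributes["http.target"] as string | undefined;
  const httpRoute = attributes["http.route"] as string | undefined;
  const httpUrl = attributes["http.url"] as string | undefined;

  let path = httpTarget || httpRoute;
  if (!path && httpUrl) {
    try {
      path = new URL(httpUrl).pathname; // full URL, keep only the path
    } catch {
      path = httpUrl; // not a parsable URL, use the raw value
    }
  }
  return path || spanName;
}

// resolvePath({ "http.url": "https://host/assets/logo.svg" }, "GET")
// -> "/assets/logo.svg", the kind of path shouldExcludePath is meant to drop
```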
package/dist/telemetry/trace-sampler.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"trace-sampler.js","names":[],"sources":["../../src/telemetry/trace-sampler.ts"],"sourcesContent":["import type { Attributes, Context, Link } from \"@opentelemetry/api\";\nimport type { Sampler, SamplingResult } from \"@opentelemetry/sdk-trace-base\";\nimport { SamplingDecision } from \"@opentelemetry/sdk-trace-base\";\nimport { shouldExcludePath } from \"../utils/path-exclusions\";\n\n/**\n * Custom sampler that filters out asset requests and other noise.\n *\n * This acts as a secondary filter after HttpInstrumentation.ignoreIncomingRequestHook.\n * It catches any spans that slip through the primary filter.\n */\nexport class AppKitSampler implements Sampler {\n shouldSample(\n _context: Context,\n _traceId: string,\n spanName: string,\n _spanKind: number,\n attributes: Attributes,\n _links: Link[],\n ): SamplingResult {\n // Check if this is an HTTP request span\n const httpTarget = attributes[\"http.target\"] as string | undefined;\n const httpRoute = attributes[\"http.route\"] as string | undefined;\n const httpUrl = attributes[\"http.url\"] as string | undefined;\n\n // Try to extract path from various attributes\n let path = httpTarget || httpRoute;\n if (!path && httpUrl) {\n try {\n path = new URL(httpUrl).pathname;\n } catch {\n // Not a valid URL, use as-is\n path = httpUrl;\n }\n }\n if (!path) {\n path = spanName;\n }\n\n // Check if path should be excluded\n if (shouldExcludePath(path)) {\n return {\n decision: SamplingDecision.NOT_RECORD,\n };\n }\n\n // For all other requests, record and sample\n return {\n decision: SamplingDecision.RECORD_AND_SAMPLED,\n };\n }\n\n toString(): string {\n return \"AppKitSampler\";\n }\n}\n"],"mappings":";;;;;;;;;;AAWA,IAAa,gBAAb,MAA8C;CAC5C,aACE,UACA,UACA,UACA,WACA,YACA,QACgB;EAEhB,MAAM,aAAa,WAAW;EAC9B,MAAM,YAAY,WAAW;EAC7B,MAAM,UAAU,WAAW;EAG3B,IAAI,OAAO,cAAc;AACzB,MAAI,CAAC,QAAQ,QACX,KAAI;AACF,UAAO,IAAI,IAAI,QAAQ,CAAC;UAClB;AAEN,UAAO;;AAGX,MAAI,CAAC,KACH,QAAO;AAIT,MAAI,kBAAkB,KAAK,CACzB,QAAO,EACL,UAAU,iBAAiB,YAC5B;AAIH,SAAO,EACL,UAAU,iBAAiB,oBAC5B;;CAGH,WAAmB;AACjB,SAAO"}
package/dist/type-generator/index.js
CHANGED
@@ -1,9 +1,11 @@
+import { createLogger } from "../logging/logger.js";
import { generateQueriesFromDescribe } from "./query-registry.js";
import fs from "node:fs";
import dotenv from "dotenv";

//#region src/type-generator/index.ts
dotenv.config();
+const logger = createLogger("type-generator");
/**
 * Generate type declarations for QueryRegistry
 * Create the d.ts file from the plugin routes and query schemas
@@ -33,12 +35,12 @@ declare module "@databricks/appkit-ui/react" {
 */
async function generateFromEntryPoint(options) {
const { outFile, queryFolder, warehouseId, noCache } = options;
-
+logger.debug("Starting type generation...");
let queryRegistry = [];
if (queryFolder) queryRegistry = await generateQueriesFromDescribe(queryFolder, warehouseId, { noCache });
const typeDeclarations = generateTypeDeclarations(queryRegistry);
fs.writeFileSync(outFile, typeDeclarations, "utf-8");
-
+logger.debug("Type generation complete!");
}

//#endregion
package/dist/type-generator/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","names":[
{"version":3,"file":"index.js","names":[],"sources":["../../src/type-generator/index.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport dotenv from \"dotenv\";\nimport { createLogger } from \"../logging/logger\";\nimport { generateQueriesFromDescribe } from \"./query-registry\";\nimport type { QuerySchema } from \"./types\";\n\ndotenv.config();\n\nconst logger = createLogger(\"type-generator\");\n\n/**\n * Generate type declarations for QueryRegistry\n * Create the d.ts file from the plugin routes and query schemas\n * @param querySchemas - the list of query schemas\n * @returns - the type declarations as a string\n */\nfunction generateTypeDeclarations(querySchemas: QuerySchema[] = []): string {\n const queryEntries = querySchemas\n .map(({ name, type }) => {\n const indentedType = type\n .split(\"\\n\")\n .map((line, i) => (i === 0 ? line : ` ${line}`))\n .join(\"\\n\");\n return ` ${name}: ${indentedType}`;\n })\n .join(\";\\n\");\n\n const querySection = queryEntries ? `\\n${queryEntries};\\n ` : \"\";\n\n return `// Auto-generated by AppKit - DO NOT EDIT\n// Generated by 'npx appkit-generate-types' or Vite plugin during build\nimport \"@databricks/appkit-ui/react\";\nimport type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from \"@databricks/appkit-ui/js\";\n\ndeclare module \"@databricks/appkit-ui/react\" {\n interface QueryRegistry {${querySection}}\n}\n`;\n}\n\n/**\n * Entry point for generating type declarations from all imported files\n * @param options - the options for the generation\n * @param options.entryPoint - the entry point file\n * @param options.outFile - the output file\n * @param options.querySchemaFile - optional path to query schema file (e.g. config/queries/schema.ts)\n */\nexport async function generateFromEntryPoint(options: {\n outFile: string;\n queryFolder?: string;\n warehouseId: string;\n noCache?: boolean;\n}) {\n const { outFile, queryFolder, warehouseId, noCache } = options;\n\n logger.debug(\"Starting type generation...\");\n\n let queryRegistry: QuerySchema[] = [];\n if (queryFolder)\n queryRegistry = await generateQueriesFromDescribe(\n queryFolder,\n warehouseId,\n {\n noCache,\n },\n );\n\n const typeDeclarations = generateTypeDeclarations(queryRegistry);\n\n fs.writeFileSync(outFile, typeDeclarations, \"utf-8\");\n\n logger.debug(\"Type generation complete!\");\n}\n"],"mappings":";;;;;;AAMA,OAAO,QAAQ;AAEf,MAAM,SAAS,aAAa,iBAAiB;;;;;;;AAQ7C,SAAS,yBAAyB,eAA8B,EAAE,EAAU;CAC1E,MAAM,eAAe,aAClB,KAAK,EAAE,MAAM,WAAW;AAKvB,SAAO,OAAO,KAAK,IAJE,KAClB,MAAM,KAAK,CACX,KAAK,MAAM,MAAO,MAAM,IAAI,OAAO,OAAO,OAAQ,CAClD,KAAK,KAAK;GAEb,CACD,KAAK,MAAM;AAId,QAAO;;;;;;6BAFc,eAAe,KAAK,aAAa,SAAS,GAQvB;;;;;;;;;;;AAY1C,eAAsB,uBAAuB,SAK1C;CACD,MAAM,EAAE,SAAS,aAAa,aAAa,YAAY;AAEvD,QAAO,MAAM,8BAA8B;CAE3C,IAAI,gBAA+B,EAAE;AACrC,KAAI,YACF,iBAAgB,MAAM,4BACpB,aACA,aACA,EACE,SACD,CACF;CAEH,MAAM,mBAAmB,yBAAyB,cAAc;AAEhE,IAAG,cAAc,SAAS,kBAAkB,QAAQ;AAEpD,QAAO,MAAM,4BAA4B"}
package/dist/type-generator/query-registry.js
CHANGED
@@ -1,3 +1,4 @@
+import { createLogger } from "../logging/logger.js";
import { CACHE_VERSION, hashSQL, loadCache, saveCache } from "./cache.js";
import { Spinner } from "./spinner.js";
import { sqlTypeToHelper, sqlTypeToMarker } from "./types.js";
@@ -6,6 +7,7 @@ import path from "node:path";
import fs from "node:fs";

//#region src/type-generator/query-registry.ts
+const logger = createLogger("type-generator:query-registry");
/**
 * Extract parameters from a SQL query
 * @param sql - the SQL query to extract parameters from
@@ -64,7 +66,7 @@ function extractParameterTypes(sql) {
async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {}) {
const { noCache = false } = options;
const queryFiles = fs.readdirSync(queryFolder).filter((file) => file.endsWith(".sql"));
-
+logger.debug("Found %d SQL queries", queryFiles.length);
const cache = noCache ? {
version: CACHE_VERSION,
queries: {}
@@ -123,7 +125,7 @@ async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {
}
}
saveCache(cache);
-if (failedQueries.length > 0)
+if (failedQueries.length > 0) logger.debug("Warning: %d queries failed", failedQueries.length);
return querySchemas;
}
/**
package/dist/type-generator/query-registry.js.map
CHANGED
@@ -1 +1 @@
{"version":3,"file":"query-registry.js","names":["paramTypes: Record<string, string>","querySchemas: QuerySchema[]","failedQueries: { name: string; error: string }[]","typeMap: Record<string, string>"],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(sql: string): string[] {\n const matches = sql.matchAll(/:([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n params.add(match[1]);\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): string {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n\n const paramTypes = extractParameterTypes(sql);\n\n // generate parameters types with JSDoc hints\n const paramsType =\n params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n // if no type annotation, use SQLTypeMarker (union type)\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? 
`/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: Array<{\n ${resultFields.join(\";\\n \")};\n }>;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false } = options;\n\n // read all query files in the folder\n const queryFiles = fs\n .readdirSync(queryFolder)\n .filter((file) => file.endsWith(\".sql\"));\n\n console.log(` Found ${queryFiles.length} SQL queries\\n`);\n\n // load cache\n const cache = noCache ? { version: CACHE_VERSION, queries: {} } : loadCache();\n\n const client = new WorkspaceClient({});\n const querySchemas: QuerySchema[] = [];\n const failedQueries: { name: string; error: string }[] = [];\n const spinner = new Spinner();\n\n // process each query file\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const queryName = path.basename(file, \".sql\");\n\n // read query file content\n const sql = fs.readFileSync(path.join(queryFolder, file), \"utf8\");\n const sqlHash = hashSQL(sql);\n\n // check cache\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash) {\n querySchemas.push({ name: queryName, type: cached.type });\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n spinner.stop(`✓ ${queryName} (cached)`);\n continue;\n }\n\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n\n const sqlWithDefaults = sql.replace(/:([a-zA-Z_]\\w*)/g, \"''\");\n\n // strip trailing semicolon for DESCRIBE QUERY\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n\n // execute DESCRIBE QUERY to get schema without running the actual query\n try {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n if (result.status.state === \"FAILED\") {\n spinner.stop(`✗ ${queryName} - failed`);\n failedQueries.push({\n name: queryName,\n error: \"Query execution failed\",\n });\n continue;\n }\n\n // convert result to query schema\n const type = convertToQueryType(result, sql, queryName);\n querySchemas.push({ name: queryName, type });\n\n // update cache\n cache.queries[queryName] = { hash: sqlHash, type };\n\n spinner.stop(`✓ ${queryName}`);\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : \"Unknown error\";\n spinner.stop(`✗ ${queryName} - ${errorMessage}`);\n failedQueries.push({ name: queryName, error: errorMessage });\n }\n }\n\n // save cache\n saveCache(cache);\n\n // log warning if there are failed queries\n if (failedQueries.length > 0) {\n console.warn(` Warning: ${failedQueries.length} queries failed\\n`);\n }\n\n return querySchemas;\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: \"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;;;;AAiBA,SAAgB,kBAAkB,KAAuB;CACvD,MAAM,UAAU,IAAI,SAAS,mBAAmB;CAChD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,QAAO,IAAI,MAAM,GAAG;AAEtB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;AAErD,SAAgB,mBACd,QACA,KACA,WACQ;CAER,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CAED,MAAM,aAAa,sBAAsB,IAAI;AAmC7C,QAAO;aACI,UAAU;kBAhCnB,OAAO,SAAS,IACZ,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAE3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB,wBAqBqB;;QAlBN,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAOpB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B,CAMiB,KAAK,YAAY,CAAC;;;;AAKvC,SAAgB,sBAAsB,KAAqC;CACzE,MAAMA,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAiC,EAAE,EACX;CACxB,MAAM,EAAE,UAAU,UAAU;CAG5B,MAAM,aAAa,GAChB,YAAY,YAAY,CACxB,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AAE1C,SAAQ,IAAI,WAAW,WAAW,OAAO,gBAAgB;CAGzD,MAAM,QAAQ,UAAU;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAAG,WAAW;CAE7E,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAMC,eAA8B,EAAE;CACtC,MAAMC,gBAAmD,EAAE;CAC3D,MAAM,UAAU,IAAI,SAAS;AAG7B,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EACxB,MAAM,YAAY,KAAK,SAAS,MAAM,OAAO;EAG7C,MAAM,MAAM,GAAG,aAAa,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO;EACjE,MAAM,UAAU,QAAQ,IAAI;EAG5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,SAAS;AACrC,gBAAa,KAAK;IAAE,MAAM;IAAW,MAAM,OAAO;IAAM,CAAC;AACzD,WAAQ,MAA
M,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;AACxE,WAAQ,KAAK,KAAK,UAAU,WAAW;AACvC;;AAGF,UAAQ,MAAM,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;EAKxE,MAAM,aAHkB,IAAI,QAAQ,oBAAoB,KAAK,CAG1B,MAAM,CAAC,QAAQ,SAAS,GAAG;AAG9D,MAAI;GACF,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF,OAAI,OAAO,OAAO,UAAU,UAAU;AACpC,YAAQ,KAAK,KAAK,UAAU,WAAW;AACvC,kBAAc,KAAK;KACjB,MAAM;KACN,OAAO;KACR,CAAC;AACF;;GAIF,MAAM,OAAO,mBAAmB,QAAQ,KAAK,UAAU;AACvD,gBAAa,KAAK;IAAE,MAAM;IAAW;IAAM,CAAC;AAG5C,SAAM,QAAQ,aAAa;IAAE,MAAM;IAAS;IAAM;AAElD,WAAQ,KAAK,KAAK,YAAY;WACvB,OAAO;GACd,MAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU;AAC3C,WAAQ,KAAK,KAAK,UAAU,KAAK,eAAe;AAChD,iBAAc,KAAK;IAAE,MAAM;IAAW,OAAO;IAAc,CAAC;;;AAKhE,WAAU,MAAM;AAGhB,KAAI,cAAc,SAAS,EACzB,SAAQ,KAAK,cAAc,cAAc,OAAO,mBAAmB;AAGrE,QAAO;;;;;;;;;;;;AAaT,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAMC,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
{"version":3,"file":"query-registry.js","names":[],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { createLogger } from \"../logging/logger\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\nconst logger = createLogger(\"type-generator:query-registry\");\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(sql: string): string[] {\n const matches = sql.matchAll(/:([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n params.add(match[1]);\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): string {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n\n const paramTypes = extractParameterTypes(sql);\n\n // generate parameters types with JSDoc hints\n const paramsType =\n params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n // if no type annotation, use SQLTypeMarker (union type)\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? 
`/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: Array<{\n ${resultFields.join(\";\\n \")};\n }>;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false } = options;\n\n // read all query files in the folder\n const queryFiles = fs\n .readdirSync(queryFolder)\n .filter((file) => file.endsWith(\".sql\"));\n\n logger.debug(\"Found %d SQL queries\", queryFiles.length);\n\n // load cache\n const cache = noCache ? { version: CACHE_VERSION, queries: {} } : loadCache();\n\n const client = new WorkspaceClient({});\n const querySchemas: QuerySchema[] = [];\n const failedQueries: { name: string; error: string }[] = [];\n const spinner = new Spinner();\n\n // process each query file\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const queryName = path.basename(file, \".sql\");\n\n // read query file content\n const sql = fs.readFileSync(path.join(queryFolder, file), \"utf8\");\n const sqlHash = hashSQL(sql);\n\n // check cache\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash) {\n querySchemas.push({ name: queryName, type: cached.type });\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n spinner.stop(`✓ ${queryName} (cached)`);\n continue;\n }\n\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n\n const sqlWithDefaults = sql.replace(/:([a-zA-Z_]\\w*)/g, \"''\");\n\n // strip trailing semicolon for DESCRIBE QUERY\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n\n // execute DESCRIBE QUERY to get schema without running the actual query\n try {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n if (result.status.state === \"FAILED\") {\n spinner.stop(`✗ ${queryName} - failed`);\n failedQueries.push({\n name: queryName,\n error: \"Query execution failed\",\n });\n continue;\n }\n\n // convert result to query schema\n const type = convertToQueryType(result, sql, queryName);\n querySchemas.push({ name: queryName, type });\n\n // update cache\n cache.queries[queryName] = { hash: sqlHash, type };\n\n spinner.stop(`✓ ${queryName}`);\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : \"Unknown error\";\n spinner.stop(`✗ ${queryName} - ${errorMessage}`);\n failedQueries.push({ name: queryName, error: errorMessage });\n }\n }\n\n // save cache\n saveCache(cache);\n\n // log warning if there are failed queries\n if (failedQueries.length > 0) {\n logger.debug(\"Warning: %d queries failed\", failedQueries.length);\n }\n\n return querySchemas;\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: \"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;AAaA,MAAM,SAAS,aAAa,gCAAgC;;;;;;AAO5D,SAAgB,kBAAkB,KAAuB;CACvD,MAAM,UAAU,IAAI,SAAS,mBAAmB;CAChD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,QAAO,IAAI,MAAM,GAAG;AAEtB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;AAErD,SAAgB,mBACd,QACA,KACA,WACQ;CAER,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CAED,MAAM,aAAa,sBAAsB,IAAI;AAmC7C,QAAO;aACI,UAAU;kBAhCnB,OAAO,SAAS,IACZ,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAE3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB,wBAqBqB;;QAlBN,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAOpB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B,CAMiB,KAAK,YAAY,CAAC;;;;AAKvC,SAAgB,sBAAsB,KAAqC;CACzE,MAAM,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAiC,EAAE,EACX;CACxB,MAAM,EAAE,UAAU,UAAU;CAG5B,MAAM,aAAa,GAChB,YAAY,YAAY,CACxB,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AAE1C,QAAO,MAAM,wBAAwB,WAAW,OAAO;CAGvD,MAAM,QAAQ,UAAU;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAAG,WAAW;CAE7E,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAM,eAA8B,EAAE;CACtC,MAAM,gBAAmD,EAAE;CAC3D,MAAM,UAAU,IAAI,SAAS;AAG7B,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EACxB,MAAM,YAAY,KAAK,SAAS,MAAM,OAAO;EAG7C,MAAM,MAAM,GAAG,aAAa,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO;EACjE,MAAM,UAAU,QAAQ,IAAI;EAG5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,SAAS;AACrC,gBAAa,KAAK;IAAE,MAAM;IAAW,MAAM,OAAO;IAAM,
CAAC;AACzD,WAAQ,MAAM,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;AACxE,WAAQ,KAAK,KAAK,UAAU,WAAW;AACvC;;AAGF,UAAQ,MAAM,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;EAKxE,MAAM,aAHkB,IAAI,QAAQ,oBAAoB,KAAK,CAG1B,MAAM,CAAC,QAAQ,SAAS,GAAG;AAG9D,MAAI;GACF,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF,OAAI,OAAO,OAAO,UAAU,UAAU;AACpC,YAAQ,KAAK,KAAK,UAAU,WAAW;AACvC,kBAAc,KAAK;KACjB,MAAM;KACN,OAAO;KACR,CAAC;AACF;;GAIF,MAAM,OAAO,mBAAmB,QAAQ,KAAK,UAAU;AACvD,gBAAa,KAAK;IAAE,MAAM;IAAW;IAAM,CAAC;AAG5C,SAAM,QAAQ,aAAa;IAAE,MAAM;IAAS;IAAM;AAElD,WAAQ,KAAK,KAAK,YAAY;WACvB,OAAO;GACd,MAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU;AAC3C,WAAQ,KAAK,KAAK,UAAU,KAAK,eAAe;AAChD,iBAAc,KAAK;IAAE,MAAM;IAAW,OAAO;IAAc,CAAC;;;AAKhE,WAAU,MAAM;AAGhB,KAAI,cAAc,SAAS,EACzB,QAAO,MAAM,8BAA8B,cAAc,OAAO;AAGlE,QAAO;;;;;;;;;;;;AAaT,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAM,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
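The `-- @param` comment convention that `extractParameterTypes` parses is easiest to see on a concrete query. A sketch assuming the two helpers are importable from this dist module (the specifier is an assumption):

```ts
import {
  extractParameters,
  extractParameterTypes,
} from "@databricks/appkit/dist/type-generator/query-registry.js"; // specifier is an assumption

const sql = `-- @param city STRING
SELECT name, total FROM orders WHERE city = :city AND total > :minTotal`;

extractParameters(sql); // ["city", "minTotal"], every :identifier placeholder
extractParameterTypes(sql); // { city: "STRING" }, only annotated params get a type
```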
package/dist/type-generator/types.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.js","names":[
{"version":3,"file":"types.js","names":[],"sources":["../../src/type-generator/types.ts"],"sourcesContent":["/**\n * Databricks statement execution response interface for DESCRIBE QUERY\n * @property statement_id - the id of the statement\n * @property status - the status of the statement\n * @property result - the result containing column schema as rows [col_name, data_type, comment]\n */\nexport interface DatabricksStatementExecutionResponse {\n statement_id: string;\n status: { state: string };\n result?: {\n data_array?: (string | null)[][];\n };\n}\n\n/**\n * Map of SQL types to their corresponding marker types\n * Used to convert SQL types to their corresponding marker types\n */\nexport const sqlTypeToMarker: Record<string, string> = {\n // string\n STRING: \"SQLStringMarker\",\n BINARY: \"SQLBinaryMarker\",\n // boolean\n BOOLEAN: \"SQLBooleanMarker\",\n // numeric\n NUMERIC: \"SQLNumberMarker\",\n INT: \"SQLNumberMarker\",\n BIGINT: \"SQLNumberMarker\",\n TINYINT: \"SQLNumberMarker\",\n SMALLINT: \"SQLNumberMarker\",\n FLOAT: \"SQLNumberMarker\",\n DOUBLE: \"SQLNumberMarker\",\n DECIMAL: \"SQLNumberMarker\",\n // date/time\n DATE: \"SQLDateMarker\",\n TIMESTAMP: \"SQLTimestampMarker\",\n TIMESTAMP_NTZ: \"SQLTimestampMarker\",\n};\n\n/**\n * Map of SQL types to their corresponding helper function names\n * Used to generate JSDoc hints for parameters\n */\nexport const sqlTypeToHelper: Record<string, string> = {\n // string\n STRING: \"sql.string()\",\n BINARY: \"sql.binary()\",\n // boolean\n BOOLEAN: \"sql.boolean()\",\n // numeric\n NUMERIC: \"sql.number()\",\n INT: \"sql.number()\",\n BIGINT: \"sql.number()\",\n TINYINT: \"sql.number()\",\n SMALLINT: \"sql.number()\",\n FLOAT: \"sql.number()\",\n DOUBLE: \"sql.number()\",\n DECIMAL: \"sql.number()\",\n // date/time\n DATE: \"sql.date()\",\n TIMESTAMP: \"sql.timestamp()\",\n TIMESTAMP_NTZ: \"sql.timestamp()\",\n};\n\n/**\n * Query schema interface\n * @property name - the name of the query\n * @property type - the type of the query (string, number, boolean, object, array, etc.)\n */\nexport interface QuerySchema {\n name: string;\n type: string;\n}\n"],"mappings":";;;;;AAkBA,MAAa,kBAA0C;CAErD,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,KAAK;CACL,QAAQ;CACR,SAAS;CACT,UAAU;CACV,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CAChB;;;;;AAMD,MAAa,kBAA0C;CAErD,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,KAAK;CACL,QAAQ;CACR,SAAS;CACT,UAAU;CACV,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CAChB"}
package/dist/type-generator/vite-plugin.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;
+{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;AAEmC;AAsBnC,UAbU,wBAAA,CAauB;EAAA,OAAA,CAAA,EAAA,MAAA;;cAAsC,CAAA,EAAA,MAAA,EAAA;;;;;;;;iBAAvD,iBAAA,WAA4B,2BAA2B"}
package/dist/type-generator/vite-plugin.js
CHANGED
@@ -1,8 +1,10 @@
+import { createLogger } from "../logging/logger.js";
import { generateFromEntryPoint } from "./index.js";
import path from "node:path";
import fs from "node:fs";

//#region src/type-generator/vite-plugin.ts
+const logger = createLogger("type-generator:vite-plugin");
/**
 * Vite plugin to generate types for AppKit queries.
 * Calls generateFromEntryPoint under the hood.
@@ -17,7 +19,7 @@ function appKitTypesPlugin(options) {
try {
const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || "";
if (!warehouseId) {
-
+logger.debug("Warehouse ID not found. Skipping type generation.");
return;
}
await generateFromEntryPoint({
@@ -28,14 +30,14 @@
});
} catch (error) {
if (process.env.NODE_ENV === "production") throw error;
-
+logger.error("Error generating types: %O", error);
}
}
return {
name: "appkit-types",
apply() {
if (!(process.env.DATABRICKS_WAREHOUSE_ID || "")) {
-
+logger.debug("Warehouse ID not found. Skipping type generation.");
return false;
}
if (!fs.existsSync(path.join(process.cwd(), "config", "queries"))) return false;
package/dist/type-generator/vite-plugin.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"vite-plugin.js","names":[
{"version":3,"file":"vite-plugin.js","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport { createLogger } from \"../logging/logger\";\nimport { generateFromEntryPoint } from \"./index\";\n\nconst logger = createLogger(\"type-generator:vite-plugin\");\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n logger.debug(\"Warehouse ID not found. Skipping type generation.\");\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n logger.error(\"Error generating types: %O\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n apply() {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n logger.debug(\"Warehouse ID not found. Skipping type generation.\");\n return false;\n }\n\n if (!fs.existsSync(path.join(process.cwd(), \"config\", \"queries\"))) {\n return false;\n }\n\n return true;\n },\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = options?.watchFolders ?? [\n path.join(process.cwd(), \"config\", \"queries\"),\n ];\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;AAMA,MAAM,SAAS,aAAa,6BAA6B;;;;;;;AAkBzD,SAAgB,kBAAkB,SAA4C;CAC5E,IAAI;CACJ,IAAI;CACJ,IAAI;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,WAAO,MAAM,oDAAoD;AACjE;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,UAAO,MAAM,8BAA8B,MAAM;;;AAIrD,QAAO;EACL,MAAM;EAEN,QAAQ;AAGN,OAAI,EAFgB,QAAQ,IAAI,2BAA2B,KAEzC;AAChB,WAAO,MAAM,oDAAoD;AACjE,WAAO;;AAGT,OAAI,CAAC,GAAG,WAAW,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAAC,CAC/D,QAAO;AAGT,UAAO;;EAGT,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,kBAAe,SAAS,gBAAgB,CACtC,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAC9C;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
package/dist/utils/env-validator.js
CHANGED
@@ -1,14 +1,14 @@
-import {
+import { ValidationError } from "../errors/validation.js";
+import { init_errors } from "../errors/index.js";

//#region src/utils/env-validator.ts
+init_errors();
function validateEnv(envVars) {
const missingVars = [];
for (const envVar of envVars) if (!process.env[envVar]) missingVars.push(envVar);
-if (missingVars.length > 0) throw
+if (missingVars.length > 0) throw ValidationError.missingEnvVars(missingVars);
}
-var init_env_validator = __esmMin((() => {}));

//#endregion
-
-export { init_env_validator, validateEnv };
+export { validateEnv };
//# sourceMappingURL=env-validator.js.map
package/dist/utils/env-validator.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["
{"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["import { ValidationError } from \"../errors\";\n\nexport function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw ValidationError.missingEnvVars(missingVars);\n }\n}\n"],"mappings":";;;;aAA4C;AAE5C,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,gBAAgB,eAAe,YAAY"}
package/dist/utils/merge.js
CHANGED
@@ -1,5 +1,3 @@
-import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
//#region src/utils/merge.ts
function deepMerge(target, ...sources) {
if (!sources.length) return target;
@@ -17,9 +15,7 @@ function deepMerge(target, ...sources) {
function isObject(item) {
return typeof item === "object" && item !== null && !Array.isArray(item);
}
-var init_merge = __esmMin((() => {}));

//#endregion
-
-export { deepMerge, init_merge };
+export { deepMerge };
//# sourceMappingURL=merge.js.map
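For reference, `deepMerge` behavior as implemented above: plain objects merge recursively, later sources win per key, and `undefined` sources are skipped. A quick sketch (the import specifier is an assumption):

```ts
import { deepMerge } from "@databricks/appkit/dist/utils/merge.js"; // specifier is an assumption

const defaults: Record<string, unknown> = {
  cache: { ttl: 60, storage: "memory" },
  debug: false,
};

deepMerge(defaults, { cache: { ttl: 300 } }, undefined);
// -> { cache: { ttl: 300, storage: "memory" }, debug: false }
// nested objects merge key by key; arrays and scalars are replaced wholesale
```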
package/dist/utils/merge.js.map
CHANGED
@@ -1 +1 @@
{"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":"
{"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}