@backstage/backend-defaults 0.5.3 → 0.6.0-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +67 -0
- package/config.d.ts +34 -6
- package/dist/auth.cjs.js +1 -0
- package/dist/auth.cjs.js.map +1 -1
- package/dist/auth.d.ts +27 -1
- package/dist/cache.d.ts +0 -1
- package/dist/database.d.ts +1 -1
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js +4 -1
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js.map +1 -1
- package/dist/entrypoints/auth/authServiceFactory.cjs.js +30 -9
- package/dist/entrypoints/auth/authServiceFactory.cjs.js.map +1 -1
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js +6 -10
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js.map +1 -1
- package/dist/entrypoints/cache/CacheManager.cjs.js +20 -19
- package/dist/entrypoints/cache/CacheManager.cjs.js.map +1 -1
- package/dist/entrypoints/database/connectors/postgres.cjs.js +40 -6
- package/dist/entrypoints/database/connectors/postgres.cjs.js.map +1 -1
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js +22 -9
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js.map +1 -1
- package/dist/entrypoints/httpRouter/http/createAuthIntegrationRouter.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/http/createCookieAuthRefreshMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/http/createCredentialsBarrier.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/http/createLifecycleMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js +4 -4
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js.map +1 -1
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js +36 -13
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js.map +1 -1
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js +9 -3
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js.map +1 -1
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js +15 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/AzureBlobStorageUrlReader.cjs.js +156 -0
- package/dist/entrypoints/urlReader/lib/AzureBlobStorageUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js +5 -11
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js +5 -14
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js +5 -14
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js +5 -14
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js +2 -10
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js +4 -9
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js +7 -16
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js +16 -2
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js +2 -0
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js +23 -4
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js.map +1 -1
- package/dist/entrypoints/urlReader/lib/util.cjs.js +29 -1
- package/dist/entrypoints/urlReader/lib/util.cjs.js.map +1 -1
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js +1 -6
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js.map +1 -1
- package/dist/httpAuth.cjs.js +1 -0
- package/dist/httpAuth.cjs.js.map +1 -1
- package/dist/httpAuth.d.ts +36 -2
- package/dist/httpRouter.cjs.js +8 -0
- package/dist/httpRouter.cjs.js.map +1 -1
- package/dist/httpRouter.d.ts +62 -1
- package/dist/package.json.cjs.js +20 -6
- package/dist/package.json.cjs.js.map +1 -1
- package/dist/urlReader.cjs.js +2 -0
- package/dist/urlReader.cjs.js.map +1 -1
- package/dist/urlReader.d.ts +41 -5
- package/package.json +33 -23
- package/dist/entrypoints/httpRouter/createAuthIntegrationRouter.cjs.js.map +0 -1
- package/dist/entrypoints/httpRouter/createCookieAuthRefreshMiddleware.cjs.js.map +0 -1
- package/dist/entrypoints/httpRouter/createCredentialsBarrier.cjs.js.map +0 -1
- package/dist/entrypoints/httpRouter/createLifecycleMiddleware.cjs.js.map +0 -1
- /package/dist/entrypoints/httpRouter/{createAuthIntegrationRouter.cjs.js → http/createAuthIntegrationRouter.cjs.js} +0 -0
- /package/dist/entrypoints/httpRouter/{createCookieAuthRefreshMiddleware.cjs.js → http/createCookieAuthRefreshMiddleware.cjs.js} +0 -0
- /package/dist/entrypoints/httpRouter/{createCredentialsBarrier.cjs.js → http/createCredentialsBarrier.cjs.js} +0 -0
- /package/dist/entrypoints/httpRouter/{createLifecycleMiddleware.cjs.js → http/createLifecycleMiddleware.cjs.js} +0 -0
package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js.map
@@ -1 +1 @@
- {"version":3,"file":"WinstonLogger.cjs.js", ...} (previous single-line minified source map; truncated in this extract)
+ {"version":3,"file":"WinstonLogger.cjs.js", ...} (regenerated single-line minified source map embedding the updated WinstonLogger.ts source)
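The regenerated map embeds the full WinstonLogger.ts source, including the redacter that trims each incoming secret, skips values of one character or less, and folds the rest into a single alternation RegExp whose matches are replaced with `***`. A minimal standalone sketch of that pattern — not the Backstage implementation itself; `escapeRegExp` here is a local stand-in for the package's internal helper:

```ts
// Sketch of the redaction approach visible in the embedded WinstonLogger source.
const escapeRegExp = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

const redactionSet = new Set<string>();
let redactionPattern: RegExp | undefined;

function addRedactions(newRedactions: Iterable<string>) {
  let added = 0;
  for (const candidate of newRedactions) {
    const redaction = candidate.trim(); // avoid stray whitespace from YAML string literals
    if (redaction.length <= 1) continue; // skip empty or single-character "secrets"
    if (!redactionSet.has(redaction)) {
      redactionSet.add(redaction);
      added += 1;
    }
  }
  if (added > 0) {
    redactionPattern = new RegExp(
      `(${[...redactionSet].map(escapeRegExp).join('|')})`,
      'g',
    );
  }
}

function redact(message: string): string {
  return redactionPattern ? message.replace(redactionPattern, '***') : message;
}

addRedactions(['super-secret-token', 'x']); // 'x' is skipped as too short
console.log(redact('calling API with super-secret-token')); // calling API with ***
```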
package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js
@@ -19,6 +19,17 @@ class PluginTaskSchedulerImpl {
       description: "Histogram of task run durations",
       unit: "seconds"
     });
+    this.lastStarted = meter.createGauge("backend_tasks.task.runs.started", {
+      description: "Epoch timestamp seconds when the task was last started",
+      unit: "seconds"
+    });
+    this.lastCompleted = meter.createGauge(
+      "backend_tasks.task.runs.completed",
+      {
+        description: "Epoch timestamp seconds when the task was last completed",
+        unit: "seconds"
+      }
+    );
     this.shutdownInitiated = new Promise((shutdownInitiated) => {
       rootLifecycle?.addShutdownHook(() => shutdownInitiated(true));
     });
@@ -28,6 +39,8 @@ class PluginTaskSchedulerImpl {
   shutdownInitiated;
   counter;
   duration;
+  lastStarted;
+  lastCompleted;
   async triggerTask(id) {
     const localTask = this.localTasksById.get(id);
     if (localTask) {
@@ -89,6 +102,7 @@ class PluginTaskSchedulerImpl {
         scope
       };
       this.counter.add(1, { ...labels, result: "started" });
+      this.lastStarted.record(Date.now() / 1e3, { taskId: task.id });
       const startTime = process.hrtime();
       try {
         await tracer.startActiveSpan(`task ${task.id}`, async (span) => {
@@ -113,6 +127,7 @@ class PluginTaskSchedulerImpl {
         const endTime = delta[0] + delta[1] / 1e9;
         this.counter.add(1, labels);
         this.duration.record(endTime, labels);
+        this.lastCompleted.record(Date.now() / 1e3, labels);
       }
     };
   }
package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js.map
@@ -1 +1 @@
- {"version":3,"file":"PluginTaskSchedulerImpl.cjs.js", ...} (previous single-line minified source map)
+ {"version":3,"file":"PluginTaskSchedulerImpl.cjs.js", ...} (regenerated single-line minified source map; the embedded source now declares and records the lastStarted/lastCompleted gauges)
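The scheduler change above adds two OpenTelemetry gauges alongside the existing run counter and duration histogram, recording epoch seconds when a task last started and last completed. A minimal sketch of the same pattern, assuming `@opentelemetry/api` 1.9+ (which provides `createGauge`) and a meter provider configured elsewhere; the wrapper function is hypothetical, not the Backstage scheduler code:

```ts
import { metrics } from '@opentelemetry/api';

const meter = metrics.getMeter('default');
const lastStarted = meter.createGauge('backend_tasks.task.runs.started', {
  description: 'Epoch timestamp seconds when the task was last started',
  unit: 'seconds',
});
const lastCompleted = meter.createGauge('backend_tasks.task.runs.completed', {
  description: 'Epoch timestamp seconds when the task was last completed',
  unit: 'seconds',
});

// Hypothetical wrapper mirroring the instrumented task function in the diff:
// record a start timestamp, run the task, and always record a completion timestamp.
async function runInstrumented(taskId: string, fn: () => Promise<void>) {
  lastStarted.record(Date.now() / 1000, { taskId });
  try {
    await fn();
  } finally {
    lastCompleted.record(Date.now() / 1000, { taskId });
  }
}
```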
package/dist/entrypoints/urlReader/lib/AzureBlobStorageUrlReader.cjs.js (new file)
@@ -0,0 +1,156 @@
+'use strict';
+
+var storageBlob = require('@azure/storage-blob');
+var errors = require('@backstage/errors');
+var stream = require('stream');
+var posix = require('path/posix');
+var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');
+var integration = require('@backstage/integration');
+
+function parseUrl(url) {
+  const parsedUrl = new URL(url);
+  const pathSegments = parsedUrl.pathname.split("/").filter(Boolean);
+  if (pathSegments.length < 2) {
+    throw new Error(`Invalid Azure Blob Storage URL format: ${url}`);
+  }
+  const container = pathSegments[0];
+  const path = pathSegments.slice(1).join("/");
+  return { path, container };
+}
+class AzureBlobStorageUrlReader {
+  // private readonly blobServiceClient: BlobServiceClient;
+  constructor(credsManager, integration, deps) {
+    this.credsManager = credsManager;
+    this.integration = integration;
+    this.deps = deps;
+  }
+  static factory = ({ config, treeResponseFactory }) => {
+    const integrations = integration.ScmIntegrations.fromConfig(config);
+    const credsManager = integration.DefaultAzureCredentialsManager.fromIntegrations(integrations);
+    return integrations.azureBlobStorage.list().map((integrationConfig) => {
+      const reader = new AzureBlobStorageUrlReader(
+        credsManager,
+        integrationConfig,
+        {
+          treeResponseFactory
+        }
+      );
+      const predicate = (url) => url.host.endsWith(
+        `${integrationConfig.config.accountName}.${integrationConfig.config.host}`
+      );
+      return { reader, predicate };
+    });
+  };
+  async createContainerClient(containerName) {
+    const accountName = this.integration.config.accountName;
+    const accountKey = this.integration.config.accountKey;
+    if (accountKey && accountName) {
+      const creds = new storageBlob.StorageSharedKeyCredential(accountName, accountKey);
+      const blobServiceClient2 = new storageBlob.BlobServiceClient(
+        `https://${accountName}.${this.integration.config.host}`,
+        creds
+      );
+      return blobServiceClient2.getContainerClient(containerName);
+    }
+    const credential = await this.credsManager.getCredentials(
+      accountName
+    );
+    let blobServiceClientUrl;
+    if (this.integration.config.endpoint) {
+      if (this.integration.config.sasToken) {
+        blobServiceClientUrl = `${this.integration.config.endpoint}?${this.integration.config.sasToken}`;
+      } else {
+        blobServiceClientUrl = `${this.integration.config.endpoint}`;
+      }
+    } else {
+      blobServiceClientUrl = `https://${this.integration.config.accountName}.${this.integration.config.host}`;
+    }
+    const blobServiceClient = new storageBlob.BlobServiceClient(
+      blobServiceClientUrl,
+      credential
+    );
+    return blobServiceClient.getContainerClient(containerName);
+  }
+  async read(url) {
+    const response = await this.readUrl(url);
+    return response.buffer();
+  }
+  async readUrl(url, options) {
+    const { etag, lastModifiedAfter } = options ?? {};
+    try {
+      const { path, container } = parseUrl(url);
+      const containerClient = await this.createContainerClient(container);
+      const blobClient = containerClient.getBlobClient(path);
+      const getBlobOptions = {
+        abortSignal: options?.signal,
+        conditions: {
+          ...etag && { ifNoneMatch: etag },
+          ...lastModifiedAfter && { ifModifiedSince: lastModifiedAfter }
+        }
+      };
+      const downloadBlockBlobResponse = await blobClient.download(
+        0,
+        void 0,
+        getBlobOptions
+      );
+      return ReadUrlResponseFactory.ReadUrlResponseFactory.fromReadable(
+        downloadBlockBlobResponse.readableStreamBody,
+        {
+          etag: downloadBlockBlobResponse.etag,
+          lastModifiedAt: downloadBlockBlobResponse.lastModified
+        }
+      );
+    } catch (e) {
+      if (e.$metadata && e.$metadata.httpStatusCode === 304) {
+        throw new errors.NotModifiedError();
+      }
+      throw new errors.ForwardedError(
+        "Could not retrieve file from Azure Blob Storage",
+        e
+      );
+    }
+  }
+  async readTree(url, options) {
+    try {
+      const { path, container } = parseUrl(url);
+      const containerClient = await this.createContainerClient(container);
+      const blobs = containerClient.listBlobsFlat({ prefix: path });
+      const responses = [];
+      for await (const blob of blobs) {
+        const blobClient = containerClient.getBlobClient(blob.name);
+        const downloadBlockBlobResponse = await blobClient.download(
+          void 0,
+          void 0,
+          { abortSignal: options?.signal }
+        );
+        responses.push({
+          data: stream.Readable.from(
+            downloadBlockBlobResponse.readableStreamBody
+          ),
+          path: posix.relative(path, blob.name),
+          lastModifiedAt: blob.properties.lastModified
+        });
+      }
+      return this.deps.treeResponseFactory.fromReadableArray(responses);
+    } catch (e) {
+      throw new errors.ForwardedError(
+        "Could not retrieve file tree from Azure Blob Storage",
+        e
+      );
+    }
+  }
+  async search() {
+    throw new Error("AzureBlobStorageUrlReader does not implement search");
+  }
+  toString() {
+    const accountName = this.integration.config.accountName;
+    const accountKey = this.integration.config.accountKey;
+    return `azureBlobStorage{accountName=${accountName},authed=${Boolean(
+      accountKey
+    )}}`;
+  }
+}
+
+exports.AzureBlobStorageUrlReader = AzureBlobStorageUrlReader;
+exports.parseUrl = parseUrl;
+//# sourceMappingURL=AzureBlobStorageUrlReader.cjs.js.map
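For reference, the exported parseUrl helper treats the first path segment of a blob URL as the container name and the remainder as the blob path. A small worked example in TypeScript; the account, container, and file names are made up:

```ts
// parseUrl as shipped above, shown against a hypothetical URL.
function parseUrl(url: string): { path: string; container: string } {
  const pathSegments = new URL(url).pathname.split('/').filter(Boolean);
  if (pathSegments.length < 2) {
    throw new Error(`Invalid Azure Blob Storage URL format: ${url}`);
  }
  return { container: pathSegments[0], path: pathSegments.slice(1).join('/') };
}

console.log(
  parseUrl('https://example.blob.core.windows.net/techdocs/guides/index.md'),
);
// -> { container: 'techdocs', path: 'guides/index.md' }
```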
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"AzureBlobStorageUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/AzureBlobStorageUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2024 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n BlobDownloadOptions,\n BlobServiceClient,\n ContainerClient,\n StorageSharedKeyCredential,\n} from '@azure/storage-blob';\nimport { ReaderFactory, ReadTreeResponseFactory } from './types';\nimport { ForwardedError, NotModifiedError } from '@backstage/errors';\nimport { Readable } from 'stream';\nimport { relative } from 'path/posix';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\nimport {\n AzureBlobStorageIntergation,\n AzureCredentialsManager,\n DefaultAzureCredentialsManager,\n ScmIntegrations,\n} from '@backstage/integration';\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeOptions,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\n\nexport function parseUrl(url: string): { path: string; container: string } {\n const parsedUrl = new URL(url);\n const pathSegments = parsedUrl.pathname.split('/').filter(Boolean);\n\n if (pathSegments.length < 2) {\n throw new Error(`Invalid Azure Blob Storage URL format: ${url}`);\n }\n\n // First segment is the container name, rest is the blob path\n const container = pathSegments[0];\n const path = pathSegments.slice(1).join('/');\n\n return { path, container };\n}\n\n/**\n * Implements a {@link @backstage/backend-plugin-api#UrlReaderService} for Azure storage accounts urls.\n *\n * @public\n */\nexport class AzureBlobStorageUrlReader implements UrlReaderService {\n static factory: ReaderFactory = ({ config, treeResponseFactory }) => {\n const integrations = ScmIntegrations.fromConfig(config);\n\n const credsManager =\n DefaultAzureCredentialsManager.fromIntegrations(integrations);\n\n return integrations.azureBlobStorage.list().map(integrationConfig => {\n const reader = new AzureBlobStorageUrlReader(\n credsManager,\n integrationConfig,\n {\n treeResponseFactory,\n },\n );\n\n const predicate = (url: URL) =>\n url.host.endsWith(\n `${integrationConfig.config.accountName}.${integrationConfig.config.host}`,\n );\n return { reader, predicate };\n });\n };\n\n // private readonly blobServiceClient: BlobServiceClient;\n\n constructor(\n private readonly credsManager: AzureCredentialsManager,\n private readonly integration: AzureBlobStorageIntergation,\n private readonly deps: {\n treeResponseFactory: ReadTreeResponseFactory;\n },\n ) {}\n\n private async createContainerClient(\n containerName: string,\n ): Promise<ContainerClient> {\n const accountName = this.integration.config.accountName; // Use the account name from the integration config\n const accountKey = this.integration.config.accountKey; // Get the account key if it exists\n\n if (accountKey && accountName) {\n const creds = new 
StorageSharedKeyCredential(accountName, accountKey);\n const blobServiceClient = new BlobServiceClient(\n `https://${accountName}.${this.integration.config.host}`,\n creds,\n );\n return blobServiceClient.getContainerClient(containerName);\n }\n // Use the credentials manager to get the correct credentials\n const credential = await this.credsManager.getCredentials(\n accountName as string,\n );\n\n let blobServiceClientUrl: string;\n\n if (this.integration.config.endpoint) {\n if (this.integration.config.sasToken) {\n blobServiceClientUrl = `${this.integration.config.endpoint}?${this.integration.config.sasToken}`;\n } else {\n blobServiceClientUrl = `${this.integration.config.endpoint}`;\n }\n } else {\n blobServiceClientUrl = `https://${this.integration.config.accountName}.${this.integration.config.host}`;\n }\n\n const blobServiceClient = new BlobServiceClient(\n blobServiceClientUrl,\n credential,\n );\n return blobServiceClient.getContainerClient(containerName);\n }\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n const { etag, lastModifiedAfter } = options ?? {};\n\n try {\n const { path, container } = parseUrl(url);\n\n const containerClient = await this.createContainerClient(container);\n const blobClient = containerClient.getBlobClient(path);\n\n const getBlobOptions: BlobDownloadOptions = {\n abortSignal: options?.signal,\n conditions: {\n ...(etag && { ifNoneMatch: etag }),\n ...(lastModifiedAfter && { ifModifiedSince: lastModifiedAfter }),\n },\n };\n\n const downloadBlockBlobResponse = await blobClient.download(\n 0,\n undefined,\n getBlobOptions,\n );\n\n return ReadUrlResponseFactory.fromReadable(\n downloadBlockBlobResponse.readableStreamBody as Readable,\n {\n etag: downloadBlockBlobResponse.etag,\n lastModifiedAt: downloadBlockBlobResponse.lastModified,\n },\n );\n } catch (e) {\n if (e.$metadata && e.$metadata.httpStatusCode === 304) {\n throw new NotModifiedError();\n }\n\n throw new ForwardedError(\n 'Could not retrieve file from Azure Blob Storage',\n e,\n );\n }\n }\n\n async readTree(\n url: string,\n options?: UrlReaderServiceReadTreeOptions,\n ): Promise<UrlReaderServiceReadTreeResponse> {\n try {\n const { path, container } = parseUrl(url);\n\n const containerClient = await this.createContainerClient(container);\n\n const blobs = containerClient.listBlobsFlat({ prefix: path });\n\n const responses = [];\n\n for await (const blob of blobs) {\n const blobClient = containerClient.getBlobClient(blob.name);\n const downloadBlockBlobResponse = await blobClient.download(\n undefined,\n undefined,\n { abortSignal: options?.signal },\n );\n\n responses.push({\n data: Readable.from(\n downloadBlockBlobResponse.readableStreamBody as Readable,\n ),\n path: relative(path, blob.name),\n lastModifiedAt: blob.properties.lastModified,\n });\n }\n\n return this.deps.treeResponseFactory.fromReadableArray(responses);\n } catch (e) {\n throw new ForwardedError(\n 'Could not retrieve file tree from Azure Blob Storage',\n e,\n );\n }\n }\n\n async search(): Promise<UrlReaderServiceSearchResponse> {\n throw new Error('AzureBlobStorageUrlReader does not implement search');\n }\n\n toString() {\n const accountName = this.integration.config.accountName;\n const accountKey = this.integration.config.accountKey;\n return `azureBlobStorage{accountName=${accountName},authed=${Boolean(\n 
accountKey,\n )}}`;\n }\n}\n"],"names":["ScmIntegrations","DefaultAzureCredentialsManager","StorageSharedKeyCredential","blobServiceClient","BlobServiceClient","ReadUrlResponseFactory","NotModifiedError","ForwardedError","Readable","relative"],"mappings":";;;;;;;;;AA0CO,SAAS,SAAS,GAAkD,EAAA;AACzE,EAAM,MAAA,SAAA,GAAY,IAAI,GAAA,CAAI,GAAG,CAAA;AAC7B,EAAA,MAAM,eAAe,SAAU,CAAA,QAAA,CAAS,MAAM,GAAG,CAAA,CAAE,OAAO,OAAO,CAAA;AAEjE,EAAI,IAAA,YAAA,CAAa,SAAS,CAAG,EAAA;AAC3B,IAAA,MAAM,IAAI,KAAA,CAAM,CAA0C,uCAAA,EAAA,GAAG,CAAE,CAAA,CAAA;AAAA;AAIjE,EAAM,MAAA,SAAA,GAAY,aAAa,CAAC,CAAA;AAChC,EAAA,MAAM,OAAO,YAAa,CAAA,KAAA,CAAM,CAAC,CAAA,CAAE,KAAK,GAAG,CAAA;AAE3C,EAAO,OAAA,EAAE,MAAM,SAAU,EAAA;AAC3B;AAOO,MAAM,yBAAsD,CAAA;AAAA;AAAA,EA0BjE,WAAA,CACmB,YACA,EAAA,WAAA,EACA,IAGjB,EAAA;AALiB,IAAA,IAAA,CAAA,YAAA,GAAA,YAAA;AACA,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAAA;AAGhB,EA/BH,OAAO,OAAyB,GAAA,CAAC,EAAE,MAAA,EAAQ,qBAA0B,KAAA;AACnE,IAAM,MAAA,YAAA,GAAeA,2BAAgB,CAAA,UAAA,CAAW,MAAM,CAAA;AAEtD,IAAM,MAAA,YAAA,GACJC,0CAA+B,CAAA,gBAAA,CAAiB,YAAY,CAAA;AAE9D,IAAA,OAAO,YAAa,CAAA,gBAAA,CAAiB,IAAK,EAAA,CAAE,IAAI,CAAqB,iBAAA,KAAA;AACnE,MAAA,MAAM,SAAS,IAAI,yBAAA;AAAA,QACjB,YAAA;AAAA,QACA,iBAAA;AAAA,QACA;AAAA,UACE;AAAA;AACF,OACF;AAEA,MAAA,MAAM,SAAY,GAAA,CAAC,GACjB,KAAA,GAAA,CAAI,IAAK,CAAA,QAAA;AAAA,QACP,GAAG,iBAAkB,CAAA,MAAA,CAAO,WAAW,CAAI,CAAA,EAAA,iBAAA,CAAkB,OAAO,IAAI,CAAA;AAAA,OAC1E;AACF,MAAO,OAAA,EAAE,QAAQ,SAAU,EAAA;AAAA,KAC5B,CAAA;AAAA,GACH;AAAA,EAYA,MAAc,sBACZ,aAC0B,EAAA;AAC1B,IAAM,MAAA,WAAA,GAAc,IAAK,CAAA,WAAA,CAAY,MAAO,CAAA,WAAA;AAC5C,IAAM,MAAA,UAAA,GAAa,IAAK,CAAA,WAAA,CAAY,MAAO,CAAA,UAAA;AAE3C,IAAA,IAAI,cAAc,WAAa,EAAA;AAC7B,MAAA,MAAM,KAAQ,GAAA,IAAIC,sCAA2B,CAAA,WAAA,EAAa,UAAU,CAAA;AACpE,MAAA,MAAMC,qBAAoB,IAAIC,6BAAA;AAAA,QAC5B,WAAW,WAAW,CAAA,CAAA,EAAI,IAAK,CAAA,WAAA,CAAY,OAAO,IAAI,CAAA,CAAA;AAAA,QACtD;AAAA,OACF;AACA,MAAOD,OAAAA,kBAAAA,CAAkB,mBAAmB,aAAa,CAAA;AAAA;AAG3D,IAAM,MAAA,UAAA,GAAa,MAAM,IAAA,CAAK,YAAa,CAAA,cAAA;AAAA,MACzC;AAAA,KACF;AAEA,IAAI,IAAA,oBAAA;AAEJ,IAAI,IAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,QAAU,EAAA;AACpC,MAAI,IAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,QAAU,EAAA;AACpC,QAAuB,oBAAA,GAAA,CAAA,EAAG,KAAK,WAAY,CAAA,MAAA,CAAO,QAAQ,CAAI,CAAA,EAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,QAAQ,CAAA,CAAA;AAAA,OACzF,MAAA;AACL,QAAA,oBAAA,GAAuB,CAAG,EAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,QAAQ,CAAA,CAAA;AAAA;AAC5D,KACK,MAAA;AACL,MAAuB,oBAAA,GAAA,CAAA,QAAA,EAAW,KAAK,WAAY,CAAA,MAAA,CAAO,WAAW,CAAI,CAAA,EAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,IAAI,CAAA,CAAA;AAAA;AAGvG,IAAA,MAAM,oBAAoB,IAAIC,6BAAA;AAAA,MAC5B,oBAAA;AAAA,MACA;AAAA,KACF;AACA,IAAO,OAAA,iBAAA,CAAkB,mBAAmB,aAAa,CAAA;AAAA;AAC3D,EAEA,MAAM,KAAK,GAA8B,EAAA;AACvC,IAAA,MAAM,QAAW,GAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAO,EAAA;AAAA;AACzB,EAEA,MAAM,OACJ,CAAA,GAAA,EACA,OAC0C,EAAA;AAC1C,IAAA,MAAM,EAAE,IAAA,EAAM,iBAAkB,EAAA,GAAI,WAAW,EAAC;AAEhD,IAAI,IAAA;AACF,MAAA,MAAM,EAAE,IAAA,EAAM,SAAU,EAAA,GAAI,SAAS,GAAG,CAAA;AAExC,MAAA,MAAM,eAAkB,GAAA,MAAM,IAAK,CAAA,qBAAA,CAAsB,SAAS,CAAA;AAClE,MAAM,MAAA,UAAA,GAAa,eAAgB,CAAA,aAAA,CAAc,IAAI,CAAA;AAErD,MAAA,MAAM,cAAsC,GAAA;AAAA,QAC1C,aAAa,OAAS,EAAA,MAAA;AAAA,QACtB,UAAY,EAAA;AAAA,UACV,GAAI,IAAA,IAAQ,EAAE,WAAA,EAAa,IAAK,EAAA;AAAA,UAChC,GAAI,iBAAA,IAAqB,EAAE,eAAA,EAAiB,iBAAkB;AAAA;AAChE,OACF;AAEA,MAAM,MAAA,yBAAA,GAA4B,MAAM,UAAW,CAAA,QAAA;AAAA,QACjD,CAAA;AAAA,QACA,KAAA,CAAA;AAAA,QACA;AAAA,OACF;AAEA,MAAA,OAAOC,6CAAuB,CAAA,YAAA;AAAA,QAC5B,yBAA0B,CAAA,kBAAA;AAAA,QAC1B;AAAA,UACE,MAAM,yBAA0B,CAAA,IAAA;AAAA,UAChC,gBAAgB,yBAA0B,CAAA;AAAA;AAC5C,OACF;AAAA,aACO,CAAG,EAAA;AACV,MAAA,IAAI,CAAE,CAAA,SAAA,IAAa,CAAE,CAAA,SAAA,CAAU,mBAAmB,GAAK
,EAAA;AACrD,QAAA,MAAM,IAAIC,uBAAiB,EAAA;AAAA;AAG7B,MAAA,MAAM,IAAIC,qBAAA;AAAA,QACR,iDAAA;AAAA,QACA;AAAA,OACF;AAAA;AACF;AACF,EAEA,MAAM,QACJ,CAAA,GAAA,EACA,OAC2C,EAAA;AAC3C,IAAI,IAAA;AACF,MAAA,MAAM,EAAE,IAAA,EAAM,SAAU,EAAA,GAAI,SAAS,GAAG,CAAA;AAExC,MAAA,MAAM,eAAkB,GAAA,MAAM,IAAK,CAAA,qBAAA,CAAsB,SAAS,CAAA;AAElE,MAAA,MAAM,QAAQ,eAAgB,CAAA,aAAA,CAAc,EAAE,MAAA,EAAQ,MAAM,CAAA;AAE5D,MAAA,MAAM,YAAY,EAAC;AAEnB,MAAA,WAAA,MAAiB,QAAQ,KAAO,EAAA;AAC9B,QAAA,MAAM,UAAa,GAAA,eAAA,CAAgB,aAAc,CAAA,IAAA,CAAK,IAAI,CAAA;AAC1D,QAAM,MAAA,yBAAA,GAA4B,MAAM,UAAW,CAAA,QAAA;AAAA,UACjD,KAAA,CAAA;AAAA,UACA,KAAA,CAAA;AAAA,UACA,EAAE,WAAa,EAAA,OAAA,EAAS,MAAO;AAAA,SACjC;AAEA,QAAA,SAAA,CAAU,IAAK,CAAA;AAAA,UACb,MAAMC,eAAS,CAAA,IAAA;AAAA,YACb,yBAA0B,CAAA;AAAA,WAC5B;AAAA,UACA,IAAM,EAAAC,cAAA,CAAS,IAAM,EAAA,IAAA,CAAK,IAAI,CAAA;AAAA,UAC9B,cAAA,EAAgB,KAAK,UAAW,CAAA;AAAA,SACjC,CAAA;AAAA;AAGH,MAAA,OAAO,IAAK,CAAA,IAAA,CAAK,mBAAoB,CAAA,iBAAA,CAAkB,SAAS,CAAA;AAAA,aACzD,CAAG,EAAA;AACV,MAAA,MAAM,IAAIF,qBAAA;AAAA,QACR,sDAAA;AAAA,QACA;AAAA,OACF;AAAA;AACF;AACF,EAEA,MAAM,MAAkD,GAAA;AACtD,IAAM,MAAA,IAAI,MAAM,qDAAqD,CAAA;AAAA;AACvE,EAEA,QAAW,GAAA;AACT,IAAM,MAAA,WAAA,GAAc,IAAK,CAAA,WAAA,CAAY,MAAO,CAAA,WAAA;AAC5C,IAAM,MAAA,UAAA,GAAa,IAAK,CAAA,WAAA,CAAY,MAAO,CAAA,UAAA;AAC3C,IAAO,OAAA,CAAA,6BAAA,EAAgC,WAAW,CAAW,QAAA,EAAA,OAAA;AAAA,MAC3D;AAAA,KACD,CAAA,CAAA,CAAA;AAAA;AAEL;;;;;"}
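The map tail above belongs to the newly added AzureBlobStorageUrlReader.cjs.js; its names table references BlobServiceClient, StorageSharedKeyCredential, DefaultAzureCredentialsManager, ReadUrlResponseFactory and NotModifiedError, so the new reader is built on @azure/storage-blob. For orientation only, a minimal sketch of that SDK's basic read path, assuming shared-key auth and placeholder account/container/blob names (this is not the package's code):

```ts
// Minimal sketch, assuming @azure/storage-blob is installed. Account, container
// and blob names are placeholders; the real reader resolves them from the
// integration config and may use managed credentials instead of an account key.
import { BlobServiceClient, StorageSharedKeyCredential } from '@azure/storage-blob';

async function downloadBlobAsStream(
  accountName: string,
  accountKey: string,
  container: string,
  blobPath: string,
): Promise<NodeJS.ReadableStream> {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const serviceClient = new BlobServiceClient(
    `https://${accountName}.blob.core.windows.net`,
    credential,
  );
  const blobClient = serviceClient.getContainerClient(container).getBlobClient(blobPath);
  const result = await blobClient.download();
  if (!result.readableStreamBody) {
    throw new Error(`No content returned for ${container}/${blobPath}`);
  }
  return result.readableStreamBody;
}
```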
@@ -1,16 +1,10 @@
 'use strict';
 
 var integration = require('@backstage/integration');
-var fetch = require('node-fetch');
 var minimatch = require('minimatch');
-var stream = require('stream');
 var errors = require('@backstage/errors');
 var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');
 
-function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
-
-var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
-
 class AzureUrlReader {
   constructor(integration, deps) {
     this.integration = integration;
@@ -40,7 +34,7 @@ class AzureUrlReader {
       const credentials = await this.deps.credentialsProvider.getCredentials({
         url: builtUrl
       });
-      response = await
+      response = await fetch(builtUrl, {
         headers: credentials?.headers,
         // TODO(freben): The signal cast is there because pre-3.x versions of
         // node-fetch have a very slightly deviating AbortSignal type signature.
@@ -54,7 +48,7 @@ class AzureUrlReader {
       throw new Error(`Unable to read ${url}, ${e}`);
     }
     if (response.ok && response.status !== 203) {
-      return ReadUrlResponseFactory.ReadUrlResponseFactory.
+      return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
     }
     const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
     if (response.status === 404) {
@@ -67,7 +61,7 @@ class AzureUrlReader {
     const credentials = await this.deps.credentialsProvider.getCredentials({
       url
     });
-    const commitsAzureResponse = await
+    const commitsAzureResponse = await fetch(integration.getAzureCommitsUrl(url), {
       headers: credentials?.headers
     });
     if (!commitsAzureResponse.ok) {
@@ -81,7 +75,7 @@ class AzureUrlReader {
     if (etag && etag === commitSha) {
       throw new errors.NotModifiedError();
     }
-    const archiveAzureResponse = await
+    const archiveAzureResponse = await fetch(integration.getAzureDownloadUrl(url), {
       headers: {
         ...credentials?.headers,
         Accept: "application/zip"
@@ -107,7 +101,7 @@ class AzureUrlReader {
       subpath = path.split("/").filter(Boolean).slice(-1)[0];
     }
     return await this.deps.treeResponseFactory.fromZipArchive({
-
+      response: archiveAzureResponse,
       etag: commitSha,
       filter,
       subpath
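The hunks above drop node-fetch together with its _interopDefaultCompat shim and call the fetch global that ships with Node 18+, and the downloaded zip archive is now passed to fromZipArchive as the whole response instead of a wrapped Node stream. A hedged sketch of that general migration pattern, not the package's code; the helper name and the Readable.fromWeb bridging are assumptions:

```ts
// Sketch only: read a URL with the built-in fetch and, where a consumer still
// expects a Node stream, bridge the web ReadableStream body via Readable.fromWeb.
import { Readable } from 'node:stream';
import type { ReadableStream as WebReadableStream } from 'node:stream/web';

async function fetchAsNodeStream(url: string, signal?: AbortSignal): Promise<Readable> {
  const response = await fetch(url, { signal }); // global fetch, no import or interop shim
  if (!response.ok || !response.body) {
    throw new Error(`Unable to read ${url}: ${response.status} ${response.statusText}`);
  }
  // response.body is a web ReadableStream; the cast papers over the DOM vs
  // node:stream/web typing mismatch.
  return Readable.fromWeb(response.body as unknown as WebReadableStream);
}
```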
@@ -1 +1 @@
-
{"version":3,"file":"AzureUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/AzureUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeOptions,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n getAzureCommitsUrl,\n getAzureDownloadUrl,\n getAzureFileFetchUrl,\n AzureDevOpsCredentialsProvider,\n DefaultAzureDevOpsCredentialsProvider,\n ScmIntegrations,\n AzureIntegration,\n} from '@backstage/integration';\nimport fetch, { Response } from 'node-fetch';\nimport { Minimatch } from 'minimatch';\nimport { Readable } from 'stream';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport { ReadTreeResponseFactory, ReaderFactory } from './types';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\n\n/**\n * Implements a {@link @backstage/backend-plugin-api#UrlReaderService} for Azure repos.\n *\n * @public\n */\nexport class AzureUrlReader implements UrlReaderService {\n static factory: ReaderFactory = ({ config, treeResponseFactory }) => {\n const integrations = ScmIntegrations.fromConfig(config);\n const credentialProvider =\n DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);\n return integrations.azure.list().map(integration => {\n const reader = new AzureUrlReader(integration, {\n treeResponseFactory,\n credentialsProvider: credentialProvider,\n });\n const predicate = (url: URL) => url.host === integration.config.host;\n return { reader, predicate };\n });\n };\n\n constructor(\n private readonly integration: AzureIntegration,\n private readonly deps: {\n treeResponseFactory: ReadTreeResponseFactory;\n credentialsProvider: AzureDevOpsCredentialsProvider;\n },\n ) {}\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n // TODO: etag is not implemented yet.\n const { signal } = options ?? {};\n\n const builtUrl = getAzureFileFetchUrl(url);\n let response: Response;\n try {\n const credentials = await this.deps.credentialsProvider.getCredentials({\n url: builtUrl,\n });\n response = await fetch(builtUrl, {\n headers: credentials?.headers,\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. 
The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n // for private repos when PAT is not valid, Azure API returns a http status code 203 with sign in page html\n if (response.ok && response.status !== 203) {\n return ReadUrlResponseFactory.fromNodeJSReadable(response.body);\n }\n\n const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(\n url: string,\n options?: UrlReaderServiceReadTreeOptions,\n ): Promise<UrlReaderServiceReadTreeResponse> {\n const { etag, filter, signal } = options ?? {};\n\n // TODO: Support filepath based reading tree feature like other providers\n\n // Get latest commit SHA\n\n const credentials = await this.deps.credentialsProvider.getCredentials({\n url: url,\n });\n\n const commitsAzureResponse = await fetch(getAzureCommitsUrl(url), {\n headers: credentials?.headers,\n });\n if (!commitsAzureResponse.ok) {\n const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;\n if (commitsAzureResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n const commitSha = (await commitsAzureResponse.json()).value[0].commitId;\n if (etag && etag === commitSha) {\n throw new NotModifiedError();\n }\n\n const archiveAzureResponse = await fetch(getAzureDownloadUrl(url), {\n headers: {\n ...credentials?.headers,\n Accept: 'application/zip',\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can be\n // removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n if (!archiveAzureResponse.ok) {\n const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;\n if (archiveAzureResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n // When downloading a zip archive from azure on a subpath we get an extra directory\n // layer added at the top. With for example the file /a/b/c.txt and a download of\n // /a/b, we'll see /b/c.txt in the zip archive. This picks out /b so that we can remove it.\n let subpath;\n const path = new URL(url).searchParams.get('path');\n if (path) {\n subpath = path.split('/').filter(Boolean).slice(-1)[0];\n }\n\n return await this.deps.treeResponseFactory.fromZipArchive({\n stream: Readable.from(archiveAzureResponse.body),\n etag: commitSha,\n filter,\n subpath,\n });\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const treeUrl = new URL(url);\n\n const path = treeUrl.searchParams.get('path');\n const matcher = path && new Minimatch(path.replace(/^\\/+/, ''));\n\n // TODO(freben): For now, read the entire repo and filter through that. 
In\n // a future improvement, we could be smart and try to deduce that non-glob\n // prefixes (like for filepaths such as some-prefix/**/a.yaml) can be used\n // to get just that part of the repo.\n treeUrl.searchParams.delete('path');\n\n const tree = await this.readTree(treeUrl.toString(), {\n etag: options?.etag,\n signal: options?.signal,\n filter: p => (matcher ? matcher.match(p) : true),\n });\n const files = await tree.files();\n\n return {\n etag: tree.etag,\n files: files.map(file => ({\n url: this.integration.resolveUrl({\n url: `/${file.path}`,\n base: url,\n }),\n content: file.content,\n lastModifiedAt: file.lastModifiedAt,\n })),\n };\n }\n\n toString() {\n const { host, credentials } = this.integration.config;\n return `azure{host=${host},authed=${Boolean(\n credentials !== undefined && credentials.length > 0,\n )}}`;\n }\n}\n"],"names":["ScmIntegrations","DefaultAzureDevOpsCredentialsProvider","getAzureFileFetchUrl","fetch","ReadUrlResponseFactory","NotFoundError","getAzureCommitsUrl","NotModifiedError","getAzureDownloadUrl","Readable","Minimatch"],"mappings":";;;;;;;;;;;;;AA8CO,MAAM,cAA2C,CAAA;AAAA,EAetD,WAAA,CACmB,aACA,IAIjB,EAAA;AALiB,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAAA;AAIhB,EApBH,OAAO,OAAyB,GAAA,CAAC,EAAE,MAAA,EAAQ,qBAA0B,KAAA;AACnE,IAAM,MAAA,YAAA,GAAeA,2BAAgB,CAAA,UAAA,CAAW,MAAM,CAAA;AACtD,IAAM,MAAA,kBAAA,GACJC,iDAAsC,CAAA,gBAAA,CAAiB,YAAY,CAAA;AACrE,IAAA,OAAO,YAAa,CAAA,KAAA,CAAM,IAAK,EAAA,CAAE,IAAI,CAAe,WAAA,KAAA;AAClD,MAAM,MAAA,MAAA,GAAS,IAAI,cAAA,CAAe,WAAa,EAAA;AAAA,QAC7C,mBAAA;AAAA,QACA,mBAAqB,EAAA;AAAA,OACtB,CAAA;AACD,MAAA,MAAM,YAAY,CAAC,GAAA,KAAa,GAAI,CAAA,IAAA,KAAS,YAAY,MAAO,CAAA,IAAA;AAChE,MAAO,OAAA,EAAE,QAAQ,SAAU,EAAA;AAAA,KAC5B,CAAA;AAAA,GACH;AAAA,EAUA,MAAM,KAAK,GAA8B,EAAA;AACvC,IAAA,MAAM,QAAW,GAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAO,EAAA;AAAA;AACzB,EAEA,MAAM,OACJ,CAAA,GAAA,EACA,OAC0C,EAAA;AAE1C,IAAA,MAAM,EAAE,MAAA,EAAW,GAAA,OAAA,IAAW,EAAC;AAE/B,IAAM,MAAA,QAAA,GAAWC,iCAAqB,GAAG,CAAA;AACzC,IAAI,IAAA,QAAA;AACJ,IAAI,IAAA;AACF,MAAA,MAAM,WAAc,GAAA,MAAM,IAAK,CAAA,IAAA,CAAK,oBAAoB,cAAe,CAAA;AAAA,QACrE,GAAK,EAAA;AAAA,OACN,CAAA;AACD,MAAW,QAAA,GAAA,MAAMC,uBAAM,QAAU,EAAA;AAAA,QAC/B,SAAS,WAAa,EAAA,OAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOtB,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,OACvC,CAAA;AAAA,aACM,CAAG,EAAA;AACV,MAAA,MAAM,IAAI,KAAM,CAAA,CAAA,eAAA,EAAkB,GAAG,CAAA,EAAA,EAAK,CAAC,CAAE,CAAA,CAAA;AAAA;AAI/C,IAAA,IAAI,QAAS,CAAA,EAAA,IAAM,QAAS,CAAA,MAAA,KAAW,GAAK,EAAA;AAC1C,MAAO,OAAAC,6CAAA,CAAuB,kBAAmB,CAAA,QAAA,CAAS,IAAI,CAAA;AAAA;AAGhE,IAAM,MAAA,OAAA,GAAU,CAAG,EAAA,GAAG,CAAyB,sBAAA,EAAA,QAAQ,KAAK,QAAS,CAAA,MAAM,CAAI,CAAA,EAAA,QAAA,CAAS,UAAU,CAAA,CAAA;AAClG,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAM,MAAA,IAAIC,qBAAc,OAAO,CAAA;AAAA;AAEjC,IAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AACzB,EAEA,MAAM,QACJ,CAAA,GAAA,EACA,OAC2C,EAAA;AAC3C,IAAA,MAAM,EAAE,IAAM,EAAA,MAAA,EAAQ,MAAO,EAAA,GAAI,WAAW,EAAC;AAM7C,IAAA,MAAM,WAAc,GAAA,MAAM,IAAK,CAAA,IAAA,CAAK,oBAAoB,cAAe,CAAA;AAAA,MACrE;AAAA,KACD,CAAA;AAED,IAAA,MAAM,oBAAuB,GAAA,MAAMF,sBAAM,CAAAG,8BAAA,CAAmB,GAAG,CAAG,EAAA;AAAA,MAChE,SAAS,WAAa,EAAA;AAAA,KACvB,CAAA;AACD,IAAI,IAAA,CAAC,qBAAqB,EAAI,EAAA;AAC5B,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,qBAAqB,MAAM,CAAA,CAAA,EAAI,qBAAqB,UAAU,CAAA,CAAA;AAClH,MAAI,IAAA,oBAAA,CAAqB,WAAW,GAAK,EAAA;AACvC,QAAM,MAAA,IAAID,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAA,MAAM,aAAa,MAAM,oBAAA,CAAqB,MAAQ,EAAA,KAAA,CAAM,CAAC,CAAE,CAAA,QAAA;AAC/D,IAAI,IAAA,IAAA,IAAQ,SAAS,SAAW,EAAA;AAC9B,MAAA,MAAM,IAAIE,uBAAiB,EAAA;AAAA;AAG7B,IAAA,MAAM,o
BAAuB,GAAA,MAAMJ,sBAAM,CAAAK,+BAAA,CAAoB,GAAG,CAAG,EAAA;AAAA,MACjE,OAAS,EAAA;AAAA,QACP,GAAG,WAAa,EAAA,OAAA;AAAA,QAChB,MAAQ,EAAA;AAAA,OACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,KACvC,CAAA;AACD,IAAI,IAAA,CAAC,qBAAqB,EAAI,EAAA;AAC5B,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,qBAAqB,MAAM,CAAA,CAAA,EAAI,qBAAqB,UAAU,CAAA,CAAA;AAClH,MAAI,IAAA,oBAAA,CAAqB,WAAW,GAAK,EAAA;AACvC,QAAM,MAAA,IAAIH,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAMzB,IAAI,IAAA,OAAA;AACJ,IAAA,MAAM,OAAO,IAAI,GAAA,CAAI,GAAG,CAAE,CAAA,YAAA,CAAa,IAAI,MAAM,CAAA;AACjD,IAAA,IAAI,IAAM,EAAA;AACR,MAAU,OAAA,GAAA,IAAA,CAAK,KAAM,CAAA,GAAG,CAAE,CAAA,MAAA,CAAO,OAAO,CAAE,CAAA,KAAA,CAAM,CAAE,CAAA,CAAA,CAAE,CAAC,CAAA;AAAA;AAGvD,IAAA,OAAO,MAAM,IAAA,CAAK,IAAK,CAAA,mBAAA,CAAoB,cAAe,CAAA;AAAA,MACxD,MAAQ,EAAAI,eAAA,CAAS,IAAK,CAAA,oBAAA,CAAqB,IAAI,CAAA;AAAA,MAC/C,IAAM,EAAA,SAAA;AAAA,MACN,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA;AACH,EAEA,MAAM,MACJ,CAAA,GAAA,EACA,OACyC,EAAA;AACzC,IAAM,MAAA,OAAA,GAAU,IAAI,GAAA,CAAI,GAAG,CAAA;AAE3B,IAAA,MAAM,IAAO,GAAA,OAAA,CAAQ,YAAa,CAAA,GAAA,CAAI,MAAM,CAAA;AAC5C,IAAM,MAAA,OAAA,GAAU,QAAQ,IAAIC,mBAAA,CAAU,KAAK,OAAQ,CAAA,MAAA,EAAQ,EAAE,CAAC,CAAA;AAM9D,IAAQ,OAAA,CAAA,YAAA,CAAa,OAAO,MAAM,CAAA;AAElC,IAAA,MAAM,OAAO,MAAM,IAAA,CAAK,QAAS,CAAA,OAAA,CAAQ,UAAY,EAAA;AAAA,MACnD,MAAM,OAAS,EAAA,IAAA;AAAA,MACf,QAAQ,OAAS,EAAA,MAAA;AAAA,MACjB,QAAQ,CAAM,CAAA,KAAA,OAAA,GAAU,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAI,GAAA;AAAA,KAC5C,CAAA;AACD,IAAM,MAAA,KAAA,GAAQ,MAAM,IAAA,CAAK,KAAM,EAAA;AAE/B,IAAO,OAAA;AAAA,MACL,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,KAAA,EAAO,KAAM,CAAA,GAAA,CAAI,CAAS,IAAA,MAAA;AAAA,QACxB,GAAA,EAAK,IAAK,CAAA,WAAA,CAAY,UAAW,CAAA;AAAA,UAC/B,GAAA,EAAK,CAAI,CAAA,EAAA,IAAA,CAAK,IAAI,CAAA,CAAA;AAAA,UAClB,IAAM,EAAA;AAAA,SACP,CAAA;AAAA,QACD,SAAS,IAAK,CAAA,OAAA;AAAA,QACd,gBAAgB,IAAK,CAAA;AAAA,OACrB,CAAA;AAAA,KACJ;AAAA;AACF,EAEA,QAAW,GAAA;AACT,IAAA,MAAM,EAAE,IAAA,EAAM,WAAY,EAAA,GAAI,KAAK,WAAY,CAAA,MAAA;AAC/C,IAAO,OAAA,CAAA,WAAA,EAAc,IAAI,CAAW,QAAA,EAAA,OAAA;AAAA,MAClC,WAAA,KAAgB,KAAa,CAAA,IAAA,WAAA,CAAY,MAAS,GAAA;AAAA,KACnD,CAAA,CAAA,CAAA;AAAA;AAEL;;;;"}
+
{"version":3,"file":"AzureUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/AzureUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeOptions,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n getAzureCommitsUrl,\n getAzureDownloadUrl,\n getAzureFileFetchUrl,\n AzureDevOpsCredentialsProvider,\n DefaultAzureDevOpsCredentialsProvider,\n ScmIntegrations,\n AzureIntegration,\n} from '@backstage/integration';\nimport { Minimatch } from 'minimatch';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport { ReadTreeResponseFactory, ReaderFactory } from './types';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\n\n/**\n * Implements a {@link @backstage/backend-plugin-api#UrlReaderService} for Azure repos.\n *\n * @public\n */\nexport class AzureUrlReader implements UrlReaderService {\n static factory: ReaderFactory = ({ config, treeResponseFactory }) => {\n const integrations = ScmIntegrations.fromConfig(config);\n const credentialProvider =\n DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);\n return integrations.azure.list().map(integration => {\n const reader = new AzureUrlReader(integration, {\n treeResponseFactory,\n credentialsProvider: credentialProvider,\n });\n const predicate = (url: URL) => url.host === integration.config.host;\n return { reader, predicate };\n });\n };\n\n constructor(\n private readonly integration: AzureIntegration,\n private readonly deps: {\n treeResponseFactory: ReadTreeResponseFactory;\n credentialsProvider: AzureDevOpsCredentialsProvider;\n },\n ) {}\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n // TODO: etag is not implemented yet.\n const { signal } = options ?? {};\n\n const builtUrl = getAzureFileFetchUrl(url);\n let response: Response;\n try {\n const credentials = await this.deps.credentialsProvider.getCredentials({\n url: builtUrl,\n });\n response = await fetch(builtUrl, {\n headers: credentials?.headers,\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. 
The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n // for private repos when PAT is not valid, Azure API returns a http status code 203 with sign in page html\n if (response.ok && response.status !== 203) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(\n url: string,\n options?: UrlReaderServiceReadTreeOptions,\n ): Promise<UrlReaderServiceReadTreeResponse> {\n const { etag, filter, signal } = options ?? {};\n\n // TODO: Support filepath based reading tree feature like other providers\n\n // Get latest commit SHA\n\n const credentials = await this.deps.credentialsProvider.getCredentials({\n url: url,\n });\n\n const commitsAzureResponse = await fetch(getAzureCommitsUrl(url), {\n headers: credentials?.headers,\n });\n if (!commitsAzureResponse.ok) {\n const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;\n if (commitsAzureResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n const commitSha = (await commitsAzureResponse.json()).value[0].commitId;\n if (etag && etag === commitSha) {\n throw new NotModifiedError();\n }\n\n const archiveAzureResponse = await fetch(getAzureDownloadUrl(url), {\n headers: {\n ...credentials?.headers,\n Accept: 'application/zip',\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can be\n // removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n if (!archiveAzureResponse.ok) {\n const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;\n if (archiveAzureResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n // When downloading a zip archive from azure on a subpath we get an extra directory\n // layer added at the top. With for example the file /a/b/c.txt and a download of\n // /a/b, we'll see /b/c.txt in the zip archive. This picks out /b so that we can remove it.\n let subpath;\n const path = new URL(url).searchParams.get('path');\n if (path) {\n subpath = path.split('/').filter(Boolean).slice(-1)[0];\n }\n\n return await this.deps.treeResponseFactory.fromZipArchive({\n response: archiveAzureResponse,\n etag: commitSha,\n filter,\n subpath,\n });\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const treeUrl = new URL(url);\n\n const path = treeUrl.searchParams.get('path');\n const matcher = path && new Minimatch(path.replace(/^\\/+/, ''));\n\n // TODO(freben): For now, read the entire repo and filter through that. 
In\n // a future improvement, we could be smart and try to deduce that non-glob\n // prefixes (like for filepaths such as some-prefix/**/a.yaml) can be used\n // to get just that part of the repo.\n treeUrl.searchParams.delete('path');\n\n const tree = await this.readTree(treeUrl.toString(), {\n etag: options?.etag,\n signal: options?.signal,\n filter: p => (matcher ? matcher.match(p) : true),\n });\n const files = await tree.files();\n\n return {\n etag: tree.etag,\n files: files.map(file => ({\n url: this.integration.resolveUrl({\n url: `/${file.path}`,\n base: url,\n }),\n content: file.content,\n lastModifiedAt: file.lastModifiedAt,\n })),\n };\n }\n\n toString() {\n const { host, credentials } = this.integration.config;\n return `azure{host=${host},authed=${Boolean(\n credentials !== undefined && credentials.length > 0,\n )}}`;\n }\n}\n"],"names":["ScmIntegrations","DefaultAzureDevOpsCredentialsProvider","getAzureFileFetchUrl","ReadUrlResponseFactory","NotFoundError","getAzureCommitsUrl","NotModifiedError","getAzureDownloadUrl","Minimatch"],"mappings":";;;;;;;AA4CO,MAAM,cAA2C,CAAA;AAAA,EAetD,WAAA,CACmB,aACA,IAIjB,EAAA;AALiB,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAAA;AAIhB,EApBH,OAAO,OAAyB,GAAA,CAAC,EAAE,MAAA,EAAQ,qBAA0B,KAAA;AACnE,IAAM,MAAA,YAAA,GAAeA,2BAAgB,CAAA,UAAA,CAAW,MAAM,CAAA;AACtD,IAAM,MAAA,kBAAA,GACJC,iDAAsC,CAAA,gBAAA,CAAiB,YAAY,CAAA;AACrE,IAAA,OAAO,YAAa,CAAA,KAAA,CAAM,IAAK,EAAA,CAAE,IAAI,CAAe,WAAA,KAAA;AAClD,MAAM,MAAA,MAAA,GAAS,IAAI,cAAA,CAAe,WAAa,EAAA;AAAA,QAC7C,mBAAA;AAAA,QACA,mBAAqB,EAAA;AAAA,OACtB,CAAA;AACD,MAAA,MAAM,YAAY,CAAC,GAAA,KAAa,GAAI,CAAA,IAAA,KAAS,YAAY,MAAO,CAAA,IAAA;AAChE,MAAO,OAAA,EAAE,QAAQ,SAAU,EAAA;AAAA,KAC5B,CAAA;AAAA,GACH;AAAA,EAUA,MAAM,KAAK,GAA8B,EAAA;AACvC,IAAA,MAAM,QAAW,GAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAO,EAAA;AAAA;AACzB,EAEA,MAAM,OACJ,CAAA,GAAA,EACA,OAC0C,EAAA;AAE1C,IAAA,MAAM,EAAE,MAAA,EAAW,GAAA,OAAA,IAAW,EAAC;AAE/B,IAAM,MAAA,QAAA,GAAWC,iCAAqB,GAAG,CAAA;AACzC,IAAI,IAAA,QAAA;AACJ,IAAI,IAAA;AACF,MAAA,MAAM,WAAc,GAAA,MAAM,IAAK,CAAA,IAAA,CAAK,oBAAoB,cAAe,CAAA;AAAA,QACrE,GAAK,EAAA;AAAA,OACN,CAAA;AACD,MAAW,QAAA,GAAA,MAAM,MAAM,QAAU,EAAA;AAAA,QAC/B,SAAS,WAAa,EAAA,OAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOtB,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,OACvC,CAAA;AAAA,aACM,CAAG,EAAA;AACV,MAAA,MAAM,IAAI,KAAM,CAAA,CAAA,eAAA,EAAkB,GAAG,CAAA,EAAA,EAAK,CAAC,CAAE,CAAA,CAAA;AAAA;AAI/C,IAAA,IAAI,QAAS,CAAA,EAAA,IAAM,QAAS,CAAA,MAAA,KAAW,GAAK,EAAA;AAC1C,MAAO,OAAAC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA;AAGrD,IAAM,MAAA,OAAA,GAAU,CAAG,EAAA,GAAG,CAAyB,sBAAA,EAAA,QAAQ,KAAK,QAAS,CAAA,MAAM,CAAI,CAAA,EAAA,QAAA,CAAS,UAAU,CAAA,CAAA;AAClG,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAM,MAAA,IAAIC,qBAAc,OAAO,CAAA;AAAA;AAEjC,IAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AACzB,EAEA,MAAM,QACJ,CAAA,GAAA,EACA,OAC2C,EAAA;AAC3C,IAAA,MAAM,EAAE,IAAM,EAAA,MAAA,EAAQ,MAAO,EAAA,GAAI,WAAW,EAAC;AAM7C,IAAA,MAAM,WAAc,GAAA,MAAM,IAAK,CAAA,IAAA,CAAK,oBAAoB,cAAe,CAAA;AAAA,MACrE;AAAA,KACD,CAAA;AAED,IAAA,MAAM,oBAAuB,GAAA,MAAM,KAAM,CAAAC,8BAAA,CAAmB,GAAG,CAAG,EAAA;AAAA,MAChE,SAAS,WAAa,EAAA;AAAA,KACvB,CAAA;AACD,IAAI,IAAA,CAAC,qBAAqB,EAAI,EAAA;AAC5B,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,qBAAqB,MAAM,CAAA,CAAA,EAAI,qBAAqB,UAAU,CAAA,CAAA;AAClH,MAAI,IAAA,oBAAA,CAAqB,WAAW,GAAK,EAAA;AACvC,QAAM,MAAA,IAAID,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAA,MAAM,aAAa,MAAM,oBAAA,CAAqB,MAAQ,EAAA,KAAA,CAAM,CAAC,CAAE,CAAA,QAAA;AAC/D,IAAI,IAAA,IAAA,IAAQ,SAAS,SAAW,EAAA;AAC9B,MAAA,MAAM,IAAIE,uBAAiB,EAAA;AAAA;AAG7B,IAAA,MAAM,oBAAuB,GAAA,MAAM,KAAM,CAAAC,+BAAA,CAAoB,GAAG,CA
AG,EAAA;AAAA,MACjE,OAAS,EAAA;AAAA,QACP,GAAG,WAAa,EAAA,OAAA;AAAA,QAChB,MAAQ,EAAA;AAAA,OACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,KACvC,CAAA;AACD,IAAI,IAAA,CAAC,qBAAqB,EAAI,EAAA;AAC5B,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,qBAAqB,MAAM,CAAA,CAAA,EAAI,qBAAqB,UAAU,CAAA,CAAA;AAClH,MAAI,IAAA,oBAAA,CAAqB,WAAW,GAAK,EAAA;AACvC,QAAM,MAAA,IAAIH,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAMzB,IAAI,IAAA,OAAA;AACJ,IAAA,MAAM,OAAO,IAAI,GAAA,CAAI,GAAG,CAAE,CAAA,YAAA,CAAa,IAAI,MAAM,CAAA;AACjD,IAAA,IAAI,IAAM,EAAA;AACR,MAAU,OAAA,GAAA,IAAA,CAAK,KAAM,CAAA,GAAG,CAAE,CAAA,MAAA,CAAO,OAAO,CAAE,CAAA,KAAA,CAAM,CAAE,CAAA,CAAA,CAAE,CAAC,CAAA;AAAA;AAGvD,IAAA,OAAO,MAAM,IAAA,CAAK,IAAK,CAAA,mBAAA,CAAoB,cAAe,CAAA;AAAA,MACxD,QAAU,EAAA,oBAAA;AAAA,MACV,IAAM,EAAA,SAAA;AAAA,MACN,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA;AACH,EAEA,MAAM,MACJ,CAAA,GAAA,EACA,OACyC,EAAA;AACzC,IAAM,MAAA,OAAA,GAAU,IAAI,GAAA,CAAI,GAAG,CAAA;AAE3B,IAAA,MAAM,IAAO,GAAA,OAAA,CAAQ,YAAa,CAAA,GAAA,CAAI,MAAM,CAAA;AAC5C,IAAM,MAAA,OAAA,GAAU,QAAQ,IAAII,mBAAA,CAAU,KAAK,OAAQ,CAAA,MAAA,EAAQ,EAAE,CAAC,CAAA;AAM9D,IAAQ,OAAA,CAAA,YAAA,CAAa,OAAO,MAAM,CAAA;AAElC,IAAA,MAAM,OAAO,MAAM,IAAA,CAAK,QAAS,CAAA,OAAA,CAAQ,UAAY,EAAA;AAAA,MACnD,MAAM,OAAS,EAAA,IAAA;AAAA,MACf,QAAQ,OAAS,EAAA,MAAA;AAAA,MACjB,QAAQ,CAAM,CAAA,KAAA,OAAA,GAAU,OAAQ,CAAA,KAAA,CAAM,CAAC,CAAI,GAAA;AAAA,KAC5C,CAAA;AACD,IAAM,MAAA,KAAA,GAAQ,MAAM,IAAA,CAAK,KAAM,EAAA;AAE/B,IAAO,OAAA;AAAA,MACL,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,KAAA,EAAO,KAAM,CAAA,GAAA,CAAI,CAAS,IAAA,MAAA;AAAA,QACxB,GAAA,EAAK,IAAK,CAAA,WAAA,CAAY,UAAW,CAAA;AAAA,UAC/B,GAAA,EAAK,CAAI,CAAA,EAAA,IAAA,CAAK,IAAI,CAAA,CAAA;AAAA,UAClB,IAAM,EAAA;AAAA,SACP,CAAA;AAAA,QACD,SAAS,IAAK,CAAA,OAAA;AAAA,QACd,gBAAgB,IAAK,CAAA;AAAA,OACrB,CAAA;AAAA,KACJ;AAAA;AACF,EAEA,QAAW,GAAA;AACT,IAAA,MAAM,EAAE,IAAA,EAAM,WAAY,EAAA,GAAI,KAAK,WAAY,CAAA,MAAA;AAC/C,IAAO,OAAA,CAAA,WAAA,EAAc,IAAI,CAAW,QAAA,EAAA,OAAA;AAAA,MAClC,WAAA,KAAgB,KAAa,CAAA,IAAA,WAAA,CAAY,MAAS,GAAA;AAAA,KACnD,CAAA,CAAA,CAAA;AAAA;AAEL;;;;"}
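The embedded sources above show the same change at the TypeScript level: ReadUrlResponseFactory.fromNodeJSReadable(response.body) becomes ReadUrlResponseFactory.fromResponse(response), and fromZipArchive receives { response } instead of { stream }. The public UrlReaderService contract is unchanged for callers; a hedged sketch of a typical consumer, with an invented in-memory cache purely for illustration:

```ts
// Sketch of a plugin-side caller using the public UrlReaderService contract.
// The cache shape is illustrative, not part of any Backstage API.
import type { UrlReaderService } from '@backstage/backend-plugin-api';
import { NotModifiedError } from '@backstage/errors';

type CacheEntry = { etag?: string; body: Buffer };

async function readWithEtag(
  reader: UrlReaderService,
  url: string,
  cache: Map<string, CacheEntry>,
): Promise<Buffer> {
  const cached = cache.get(url);
  try {
    const response = await reader.readUrl(url, { etag: cached?.etag });
    const body = await response.buffer();
    cache.set(url, { etag: response.etag, body });
    return body;
  } catch (e) {
    if (cached && e instanceof NotModifiedError) {
      return cached.body; // the reader reported the content as unchanged
    }
    throw e;
  }
}
```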
@@ -2,17 +2,13 @@
 
 var errors = require('@backstage/errors');
 var integration = require('@backstage/integration');
-var fetch = require('node-fetch');
 var parseGitUrl = require('git-url-parse');
 var lodash = require('lodash');
 var minimatch = require('minimatch');
-var stream = require('stream');
 var ReadUrlResponseFactory = require('./ReadUrlResponseFactory.cjs.js');
-var util = require('./util.cjs.js');
 
 function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
 
-var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
 var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
 
 class BitbucketCloudUrlReader {
@@ -51,7 +47,7 @@ class BitbucketCloudUrlReader {
     );
     let response;
     try {
-      response = await
+      response = await fetch(bitbucketUrl.toString(), {
         headers: {
           ...requestOptions.headers,
           ...etag && { "If-None-Match": etag },
@@ -74,12 +70,7 @@ class BitbucketCloudUrlReader {
       throw new errors.NotModifiedError();
     }
     if (response.ok) {
-      return ReadUrlResponseFactory.ReadUrlResponseFactory.
-        etag: response.headers.get("ETag") ?? void 0,
-        lastModifiedAt: util.parseLastModified(
-          response.headers.get("Last-Modified")
-        )
-      });
+      return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
     }
     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
     if (response.status === 404) {
@@ -97,7 +88,7 @@ class BitbucketCloudUrlReader {
       url,
       this.integration.config
     );
-    const archiveResponse = await
+    const archiveResponse = await fetch(
       downloadUrl,
       integration.getBitbucketCloudRequestOptions(this.integration.config)
     );
@@ -109,7 +100,7 @@ class BitbucketCloudUrlReader {
       throw new Error(message);
     }
     return await this.deps.treeResponseFactory.fromTarArchive({
-
+      response: archiveResponse,
       subpath: filepath,
       etag: lastCommitShortHash,
       filter: options?.filter
@@ -151,7 +142,7 @@ class BitbucketCloudUrlReader {
       );
     }
     const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;
-    const commitsResponse = await
+    const commitsResponse = await fetch(
       commitsApiUrl,
       integration.getBitbucketCloudRequestOptions(this.integration.config)
     );
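The Bitbucket Cloud hunks above make the same switch to the built-in fetch while keeping the conditional-request handling (the If-None-Match header in the hunk; If-Modified-Since and the 304-to-NotModifiedError translation appear in the embedded source below). A hedged standalone sketch of that request shape; the helper is illustrative, not the package's internals:

```ts
// Sketch only: conditional GET with the built-in fetch, mirroring the header
// handling in the diff. Auth headers from the integration config are omitted.
import { NotModifiedError } from '@backstage/errors';

async function conditionalFetch(
  url: string,
  options: { etag?: string; lastModifiedAfter?: Date; signal?: AbortSignal } = {},
): Promise<Response> {
  const headers: Record<string, string> = {};
  if (options.etag) {
    headers['If-None-Match'] = options.etag;
  }
  if (options.lastModifiedAfter) {
    headers['If-Modified-Since'] = options.lastModifiedAfter.toUTCString();
  }

  const response = await fetch(url, { headers, signal: options.signal });
  if (response.status === 304) {
    // The readers translate a 304 into NotModifiedError for their callers.
    throw new NotModifiedError();
  }
  return response;
}
```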
@@ -1 +1 @@
-
{"version":3,"file":"BitbucketCloudUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/BitbucketCloudUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeOptions,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport {\n BitbucketCloudIntegration,\n getBitbucketCloudDefaultBranch,\n getBitbucketCloudDownloadUrl,\n getBitbucketCloudFileFetchUrl,\n getBitbucketCloudRequestOptions,\n ScmIntegrations,\n} from '@backstage/integration';\nimport fetch, { Response } from 'node-fetch';\nimport parseGitUrl from 'git-url-parse';\nimport { trimEnd } from 'lodash';\nimport { Minimatch } from 'minimatch';\nimport { Readable } from 'stream';\nimport { ReaderFactory, ReadTreeResponseFactory } from './types';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\nimport { parseLastModified } from './util';\n\n/**\n * Implements a {@link @backstage/backend-plugin-api#UrlReaderService} for files from Bitbucket Cloud.\n *\n * @public\n */\nexport class BitbucketCloudUrlReader implements UrlReaderService {\n static factory: ReaderFactory = ({ config, treeResponseFactory }) => {\n const integrations = ScmIntegrations.fromConfig(config);\n return integrations.bitbucketCloud.list().map(integration => {\n const reader = new BitbucketCloudUrlReader(integration, {\n treeResponseFactory,\n });\n const predicate = (url: URL) => url.host === integration.config.host;\n return { reader, predicate };\n });\n };\n\n constructor(\n private readonly integration: BitbucketCloudIntegration,\n private readonly deps: { treeResponseFactory: ReadTreeResponseFactory },\n ) {\n const { host, username, appPassword } = integration.config;\n\n if (username && !appPassword) {\n throw new Error(\n `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.`,\n );\n }\n }\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n const { etag, lastModifiedAfter, signal } = options ?? 
{};\n const bitbucketUrl = getBitbucketCloudFileFetchUrl(\n url,\n this.integration.config,\n );\n const requestOptions = getBitbucketCloudRequestOptions(\n this.integration.config,\n );\n\n let response: Response;\n try {\n response = await fetch(bitbucketUrl.toString(), {\n headers: {\n ...requestOptions.headers,\n ...(etag && { 'If-None-Match': etag }),\n ...(lastModifiedAfter && {\n 'If-Modified-Since': lastModifiedAfter.toUTCString(),\n }),\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can be\n // removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {\n etag: response.headers.get('ETag') ?? undefined,\n lastModifiedAt: parseLastModified(\n response.headers.get('Last-Modified'),\n ),\n });\n }\n\n const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(\n url: string,\n options?: UrlReaderServiceReadTreeOptions,\n ): Promise<UrlReaderServiceReadTreeResponse> {\n const { filepath } = parseGitUrl(url);\n\n const lastCommitShortHash = await this.getLastCommitShortHash(url);\n if (options?.etag && options.etag === lastCommitShortHash) {\n throw new NotModifiedError();\n }\n\n const downloadUrl = await getBitbucketCloudDownloadUrl(\n url,\n this.integration.config,\n );\n const archiveResponse = await fetch(\n downloadUrl,\n getBitbucketCloudRequestOptions(this.integration.config),\n );\n if (!archiveResponse.ok) {\n const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;\n if (archiveResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n return await this.deps.treeResponseFactory.fromTarArchive({\n stream: Readable.from(archiveResponse.body),\n subpath: filepath,\n etag: lastCommitShortHash,\n filter: options?.filter,\n });\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { filepath } = parseGitUrl(url);\n const matcher = new Minimatch(filepath);\n\n // TODO(freben): For now, read the entire repo and filter through that. 
In\n // a future improvement, we could be smart and try to deduce that non-glob\n // prefixes (like for filepaths such as some-prefix/**/a.yaml) can be used\n // to get just that part of the repo.\n const treeUrl = trimEnd(url.replace(filepath, ''), '/');\n\n const tree = await this.readTree(treeUrl, {\n etag: options?.etag,\n filter: path => matcher.match(path),\n });\n const files = await tree.files();\n\n return {\n etag: tree.etag,\n files: files.map(file => ({\n url: this.integration.resolveUrl({\n url: `/${file.path}`,\n base: url,\n }),\n content: file.content,\n lastModifiedAt: file.lastModifiedAt,\n })),\n };\n }\n\n toString() {\n const { host, username, appPassword } = this.integration.config;\n const authed = Boolean(username && appPassword);\n return `bitbucketCloud{host=${host},authed=${authed}}`;\n }\n\n private async getLastCommitShortHash(url: string): Promise<string> {\n const { name: repoName, owner: project, ref } = parseGitUrl(url);\n\n let branch = ref;\n if (!branch) {\n branch = await getBitbucketCloudDefaultBranch(\n url,\n this.integration.config,\n );\n }\n\n const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;\n\n const commitsResponse = await fetch(\n commitsApiUrl,\n getBitbucketCloudRequestOptions(this.integration.config),\n );\n if (!commitsResponse.ok) {\n const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;\n if (commitsResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n const commits = await commitsResponse.json();\n if (\n commits &&\n commits.values &&\n commits.values.length > 0 &&\n commits.values[0].hash\n ) {\n return commits.values[0].hash.substring(0, 12);\n }\n\n throw new Error(`Failed to read response from ${commitsApiUrl}`);\n 
}\n}\n"],"names":["ScmIntegrations","getBitbucketCloudFileFetchUrl","getBitbucketCloudRequestOptions","fetch","NotModifiedError","ReadUrlResponseFactory","parseLastModified","NotFoundError","parseGitUrl","getBitbucketCloudDownloadUrl","Readable","Minimatch","trimEnd","getBitbucketCloudDefaultBranch"],"mappings":";;;;;;;;;;;;;;;;;AAgDO,MAAM,uBAAoD,CAAA;AAAA,EAY/D,WAAA,CACmB,aACA,IACjB,EAAA;AAFiB,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAEjB,IAAA,MAAM,EAAE,IAAA,EAAM,QAAU,EAAA,WAAA,KAAgB,WAAY,CAAA,MAAA;AAEpD,IAAI,IAAA,QAAA,IAAY,CAAC,WAAa,EAAA;AAC5B,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,oCAAoC,IAAI,CAAA,kEAAA;AAAA,OAC1C;AAAA;AACF;AACF,EAtBA,OAAO,OAAyB,GAAA,CAAC,EAAE,MAAA,EAAQ,qBAA0B,KAAA;AACnE,IAAM,MAAA,YAAA,GAAeA,2BAAgB,CAAA,UAAA,CAAW,MAAM,CAAA;AACtD,IAAA,OAAO,YAAa,CAAA,cAAA,CAAe,IAAK,EAAA,CAAE,IAAI,CAAe,WAAA,KAAA;AAC3D,MAAM,MAAA,MAAA,GAAS,IAAI,uBAAA,CAAwB,WAAa,EAAA;AAAA,QACtD;AAAA,OACD,CAAA;AACD,MAAA,MAAM,YAAY,CAAC,GAAA,KAAa,GAAI,CAAA,IAAA,KAAS,YAAY,MAAO,CAAA,IAAA;AAChE,MAAO,OAAA,EAAE,QAAQ,SAAU,EAAA;AAAA,KAC5B,CAAA;AAAA,GACH;AAAA,EAeA,MAAM,KAAK,GAA8B,EAAA;AACvC,IAAA,MAAM,QAAW,GAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAO,EAAA;AAAA;AACzB,EAEA,MAAM,OACJ,CAAA,GAAA,EACA,OAC0C,EAAA;AAC1C,IAAA,MAAM,EAAE,IAAM,EAAA,iBAAA,EAAmB,MAAO,EAAA,GAAI,WAAW,EAAC;AACxD,IAAA,MAAM,YAAe,GAAAC,yCAAA;AAAA,MACnB,GAAA;AAAA,MACA,KAAK,WAAY,CAAA;AAAA,KACnB;AACA,IAAA,MAAM,cAAiB,GAAAC,2CAAA;AAAA,MACrB,KAAK,WAAY,CAAA;AAAA,KACnB;AAEA,IAAI,IAAA,QAAA;AACJ,IAAI,IAAA;AACF,MAAA,QAAA,GAAW,MAAMC,sBAAA,CAAM,YAAa,CAAA,QAAA,EAAY,EAAA;AAAA,QAC9C,OAAS,EAAA;AAAA,UACP,GAAG,cAAe,CAAA,OAAA;AAAA,UAClB,GAAI,IAAA,IAAQ,EAAE,eAAA,EAAiB,IAAK,EAAA;AAAA,UACpC,GAAI,iBAAqB,IAAA;AAAA,YACvB,mBAAA,EAAqB,kBAAkB,WAAY;AAAA;AACrD,SACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOA,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,OACvC,CAAA;AAAA,aACM,CAAG,EAAA;AACV,MAAA,MAAM,IAAI,KAAM,CAAA,CAAA,eAAA,EAAkB,GAAG,CAAA,EAAA,EAAK,CAAC,CAAE,CAAA,CAAA;AAAA;AAG/C,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAA,MAAM,IAAIC,uBAAiB,EAAA;AAAA;AAG7B,IAAA,IAAI,SAAS,EAAI,EAAA;AACf,MAAO,OAAAC,6CAAA,CAAuB,kBAAmB,CAAA,QAAA,CAAS,IAAM,EAAA;AAAA,QAC9D,IAAM,EAAA,QAAA,CAAS,OAAQ,CAAA,GAAA,CAAI,MAAM,CAAK,IAAA,KAAA,CAAA;AAAA,QACtC,cAAgB,EAAAC,sBAAA;AAAA,UACd,QAAA,CAAS,OAAQ,CAAA,GAAA,CAAI,eAAe;AAAA;AACtC,OACD,CAAA;AAAA;AAGH,IAAM,MAAA,OAAA,GAAU,CAAG,EAAA,GAAG,CAAyB,sBAAA,EAAA,YAAY,KAAK,QAAS,CAAA,MAAM,CAAI,CAAA,EAAA,QAAA,CAAS,UAAU,CAAA,CAAA;AACtG,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAM,MAAA,IAAIC,qBAAc,OAAO,CAAA;AAAA;AAEjC,IAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AACzB,EAEA,MAAM,QACJ,CAAA,GAAA,EACA,OAC2C,EAAA;AAC3C,IAAA,MAAM,EAAE,QAAA,EAAa,GAAAC,4BAAA,CAAY,GAAG,CAAA;AAEpC,IAAA,MAAM,mBAAsB,GAAA,MAAM,IAAK,CAAA,sBAAA,CAAuB,GAAG,CAAA;AACjE,IAAA,IAAI,OAAS,EAAA,IAAA,IAAQ,OAAQ,CAAA,IAAA,KAAS,mBAAqB,EAAA;AACzD,MAAA,MAAM,IAAIJ,uBAAiB,EAAA;AAAA;AAG7B,IAAA,MAAM,cAAc,MAAMK,wCAAA;AAAA,MACxB,GAAA;AAAA,MACA,KAAK,WAAY,CAAA;AAAA,KACnB;AACA,IAAA,MAAM,kBAAkB,MAAMN,sBAAA;AAAA,MAC5B,WAAA;AAAA,MACAD,2CAAA,CAAgC,IAAK,CAAA,WAAA,CAAY,MAAM;AAAA,KACzD;AACA,IAAI,IAAA,CAAC,gBAAgB,EAAI,EAAA;AACvB,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,gBAAgB,MAAM,CAAA,CAAA,EAAI,gBAAgB,UAAU,CAAA,CAAA;AACxG,MAAI,IAAA,eAAA,CAAgB,WAAW,GAAK,EAAA;AAClC,QAAM,MAAA,IAAIK,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAA,OAAO,MAAM,IAAA,CAAK,IAAK,CAAA,mBAAA,CAAoB,cAAe,CAAA;AAAA,MACxD,MAAQ,EAAAG,eAAA,CAAS,IAAK,CAAA,eAAA,CAAgB,IAAI,CAAA;AAAA,MAC1C,OAAS,EAAA,QAAA;AAAA,MACT,IAAM,EAAA,mBAAA;AAAA,MACN,QAAQ,OAAS,EAAA;AAAA,KAClB,CAAA;AAAA;AACH,EAEA,MAAM,MACJ,CAAA,GAAA,EACA,OACyC,EAAA;AACzC,IAAA,MAAM,EAAE,QAAA,EAA
a,GAAAF,4BAAA,CAAY,GAAG,CAAA;AACpC,IAAM,MAAA,OAAA,GAAU,IAAIG,mBAAA,CAAU,QAAQ,CAAA;AAMtC,IAAA,MAAM,UAAUC,cAAQ,CAAA,GAAA,CAAI,QAAQ,QAAU,EAAA,EAAE,GAAG,GAAG,CAAA;AAEtD,IAAA,MAAM,IAAO,GAAA,MAAM,IAAK,CAAA,QAAA,CAAS,OAAS,EAAA;AAAA,MACxC,MAAM,OAAS,EAAA,IAAA;AAAA,MACf,MAAQ,EAAA,CAAA,IAAA,KAAQ,OAAQ,CAAA,KAAA,CAAM,IAAI;AAAA,KACnC,CAAA;AACD,IAAM,MAAA,KAAA,GAAQ,MAAM,IAAA,CAAK,KAAM,EAAA;AAE/B,IAAO,OAAA;AAAA,MACL,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,KAAA,EAAO,KAAM,CAAA,GAAA,CAAI,CAAS,IAAA,MAAA;AAAA,QACxB,GAAA,EAAK,IAAK,CAAA,WAAA,CAAY,UAAW,CAAA;AAAA,UAC/B,GAAA,EAAK,CAAI,CAAA,EAAA,IAAA,CAAK,IAAI,CAAA,CAAA;AAAA,UAClB,IAAM,EAAA;AAAA,SACP,CAAA;AAAA,QACD,SAAS,IAAK,CAAA,OAAA;AAAA,QACd,gBAAgB,IAAK,CAAA;AAAA,OACrB,CAAA;AAAA,KACJ;AAAA;AACF,EAEA,QAAW,GAAA;AACT,IAAA,MAAM,EAAE,IAAM,EAAA,QAAA,EAAU,WAAY,EAAA,GAAI,KAAK,WAAY,CAAA,MAAA;AACzD,IAAM,MAAA,MAAA,GAAS,OAAQ,CAAA,QAAA,IAAY,WAAW,CAAA;AAC9C,IAAO,OAAA,CAAA,oBAAA,EAAuB,IAAI,CAAA,QAAA,EAAW,MAAM,CAAA,CAAA,CAAA;AAAA;AACrD,EAEA,MAAc,uBAAuB,GAA8B,EAAA;AACjE,IAAM,MAAA,EAAE,MAAM,QAAU,EAAA,KAAA,EAAO,SAAS,GAAI,EAAA,GAAIJ,6BAAY,GAAG,CAAA;AAE/D,IAAA,IAAI,MAAS,GAAA,GAAA;AACb,IAAA,IAAI,CAAC,MAAQ,EAAA;AACX,MAAA,MAAA,GAAS,MAAMK,0CAAA;AAAA,QACb,GAAA;AAAA,QACA,KAAK,WAAY,CAAA;AAAA,OACnB;AAAA;AAGF,IAAM,MAAA,aAAA,GAAgB,CAAG,EAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,UAAU,CAAA,cAAA,EAAiB,OAAO,CAAA,CAAA,EAAI,QAAQ,CAAA,SAAA,EAAY,MAAM,CAAA,CAAA;AAEjH,IAAA,MAAM,kBAAkB,MAAMV,sBAAA;AAAA,MAC5B,aAAA;AAAA,MACAD,2CAAA,CAAgC,IAAK,CAAA,WAAA,CAAY,MAAM;AAAA,KACzD;AACA,IAAI,IAAA,CAAC,gBAAgB,EAAI,EAAA;AACvB,MAAM,MAAA,OAAA,GAAU,mCAAmC,aAAa,CAAA,EAAA,EAAK,gBAAgB,MAAM,CAAA,CAAA,EAAI,gBAAgB,UAAU,CAAA,CAAA;AACzH,MAAI,IAAA,eAAA,CAAgB,WAAW,GAAK,EAAA;AAClC,QAAM,MAAA,IAAIK,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAM,MAAA,OAAA,GAAU,MAAM,eAAA,CAAgB,IAAK,EAAA;AAC3C,IACE,IAAA,OAAA,IACA,OAAQ,CAAA,MAAA,IACR,OAAQ,CAAA,MAAA,CAAO,MAAS,GAAA,CAAA,IACxB,OAAQ,CAAA,MAAA,CAAO,CAAC,CAAA,CAAE,IAClB,EAAA;AACA,MAAA,OAAO,QAAQ,MAAO,CAAA,CAAC,EAAE,IAAK,CAAA,SAAA,CAAU,GAAG,EAAE,CAAA;AAAA;AAG/C,IAAA,MAAM,IAAI,KAAA,CAAM,CAAgC,6BAAA,EAAA,aAAa,CAAE,CAAA,CAAA;AAAA;AAEnE;;;;"}
+
{"version":3,"file":"BitbucketCloudUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/BitbucketCloudUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeOptions,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport {\n BitbucketCloudIntegration,\n getBitbucketCloudDefaultBranch,\n getBitbucketCloudDownloadUrl,\n getBitbucketCloudFileFetchUrl,\n getBitbucketCloudRequestOptions,\n ScmIntegrations,\n} from '@backstage/integration';\nimport parseGitUrl from 'git-url-parse';\nimport { trimEnd } from 'lodash';\nimport { Minimatch } from 'minimatch';\nimport { ReaderFactory, ReadTreeResponseFactory } from './types';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\n\n/**\n * Implements a {@link @backstage/backend-plugin-api#UrlReaderService} for files from Bitbucket Cloud.\n *\n * @public\n */\nexport class BitbucketCloudUrlReader implements UrlReaderService {\n static factory: ReaderFactory = ({ config, treeResponseFactory }) => {\n const integrations = ScmIntegrations.fromConfig(config);\n return integrations.bitbucketCloud.list().map(integration => {\n const reader = new BitbucketCloudUrlReader(integration, {\n treeResponseFactory,\n });\n const predicate = (url: URL) => url.host === integration.config.host;\n return { reader, predicate };\n });\n };\n\n constructor(\n private readonly integration: BitbucketCloudIntegration,\n private readonly deps: { treeResponseFactory: ReadTreeResponseFactory },\n ) {\n const { host, username, appPassword } = integration.config;\n\n if (username && !appPassword) {\n throw new Error(\n `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.`,\n );\n }\n }\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n const { etag, lastModifiedAfter, signal } = options ?? 
{};\n const bitbucketUrl = getBitbucketCloudFileFetchUrl(\n url,\n this.integration.config,\n );\n const requestOptions = getBitbucketCloudRequestOptions(\n this.integration.config,\n );\n\n let response: Response;\n try {\n response = await fetch(bitbucketUrl.toString(), {\n headers: {\n ...requestOptions.headers,\n ...(etag && { 'If-None-Match': etag }),\n ...(lastModifiedAfter && {\n 'If-Modified-Since': lastModifiedAfter.toUTCString(),\n }),\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can be\n // removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n ...(signal && { signal: signal as any }),\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(\n url: string,\n options?: UrlReaderServiceReadTreeOptions,\n ): Promise<UrlReaderServiceReadTreeResponse> {\n const { filepath } = parseGitUrl(url);\n\n const lastCommitShortHash = await this.getLastCommitShortHash(url);\n if (options?.etag && options.etag === lastCommitShortHash) {\n throw new NotModifiedError();\n }\n\n const downloadUrl = await getBitbucketCloudDownloadUrl(\n url,\n this.integration.config,\n );\n const archiveResponse = await fetch(\n downloadUrl,\n getBitbucketCloudRequestOptions(this.integration.config),\n );\n if (!archiveResponse.ok) {\n const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;\n if (archiveResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n return await this.deps.treeResponseFactory.fromTarArchive({\n response: archiveResponse,\n subpath: filepath,\n etag: lastCommitShortHash,\n filter: options?.filter,\n });\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { filepath } = parseGitUrl(url);\n const matcher = new Minimatch(filepath);\n\n // TODO(freben): For now, read the entire repo and filter through that. 
In\n // a future improvement, we could be smart and try to deduce that non-glob\n // prefixes (like for filepaths such as some-prefix/**/a.yaml) can be used\n // to get just that part of the repo.\n const treeUrl = trimEnd(url.replace(filepath, ''), '/');\n\n const tree = await this.readTree(treeUrl, {\n etag: options?.etag,\n filter: path => matcher.match(path),\n });\n const files = await tree.files();\n\n return {\n etag: tree.etag,\n files: files.map(file => ({\n url: this.integration.resolveUrl({\n url: `/${file.path}`,\n base: url,\n }),\n content: file.content,\n lastModifiedAt: file.lastModifiedAt,\n })),\n };\n }\n\n toString() {\n const { host, username, appPassword } = this.integration.config;\n const authed = Boolean(username && appPassword);\n return `bitbucketCloud{host=${host},authed=${authed}}`;\n }\n\n private async getLastCommitShortHash(url: string): Promise<string> {\n const { name: repoName, owner: project, ref } = parseGitUrl(url);\n\n let branch = ref;\n if (!branch) {\n branch = await getBitbucketCloudDefaultBranch(\n url,\n this.integration.config,\n );\n }\n\n const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;\n\n const commitsResponse = await fetch(\n commitsApiUrl,\n getBitbucketCloudRequestOptions(this.integration.config),\n );\n if (!commitsResponse.ok) {\n const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;\n if (commitsResponse.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n const commits = await commitsResponse.json();\n if (\n commits &&\n commits.values &&\n commits.values.length > 0 &&\n commits.values[0].hash\n ) {\n return commits.values[0].hash.substring(0, 12);\n }\n\n throw new Error(`Failed to read response from ${commitsApiUrl}`);\n 
}\n}\n"],"names":["ScmIntegrations","getBitbucketCloudFileFetchUrl","getBitbucketCloudRequestOptions","NotModifiedError","ReadUrlResponseFactory","NotFoundError","parseGitUrl","getBitbucketCloudDownloadUrl","Minimatch","trimEnd","getBitbucketCloudDefaultBranch"],"mappings":";;;;;;;;;;;;;AA6CO,MAAM,uBAAoD,CAAA;AAAA,EAY/D,WAAA,CACmB,aACA,IACjB,EAAA;AAFiB,IAAA,IAAA,CAAA,WAAA,GAAA,WAAA;AACA,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAEjB,IAAA,MAAM,EAAE,IAAA,EAAM,QAAU,EAAA,WAAA,KAAgB,WAAY,CAAA,MAAA;AAEpD,IAAI,IAAA,QAAA,IAAY,CAAC,WAAa,EAAA;AAC5B,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,oCAAoC,IAAI,CAAA,kEAAA;AAAA,OAC1C;AAAA;AACF;AACF,EAtBA,OAAO,OAAyB,GAAA,CAAC,EAAE,MAAA,EAAQ,qBAA0B,KAAA;AACnE,IAAM,MAAA,YAAA,GAAeA,2BAAgB,CAAA,UAAA,CAAW,MAAM,CAAA;AACtD,IAAA,OAAO,YAAa,CAAA,cAAA,CAAe,IAAK,EAAA,CAAE,IAAI,CAAe,WAAA,KAAA;AAC3D,MAAM,MAAA,MAAA,GAAS,IAAI,uBAAA,CAAwB,WAAa,EAAA;AAAA,QACtD;AAAA,OACD,CAAA;AACD,MAAA,MAAM,YAAY,CAAC,GAAA,KAAa,GAAI,CAAA,IAAA,KAAS,YAAY,MAAO,CAAA,IAAA;AAChE,MAAO,OAAA,EAAE,QAAQ,SAAU,EAAA;AAAA,KAC5B,CAAA;AAAA,GACH;AAAA,EAeA,MAAM,KAAK,GAA8B,EAAA;AACvC,IAAA,MAAM,QAAW,GAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAO,EAAA;AAAA;AACzB,EAEA,MAAM,OACJ,CAAA,GAAA,EACA,OAC0C,EAAA;AAC1C,IAAA,MAAM,EAAE,IAAM,EAAA,iBAAA,EAAmB,MAAO,EAAA,GAAI,WAAW,EAAC;AACxD,IAAA,MAAM,YAAe,GAAAC,yCAAA;AAAA,MACnB,GAAA;AAAA,MACA,KAAK,WAAY,CAAA;AAAA,KACnB;AACA,IAAA,MAAM,cAAiB,GAAAC,2CAAA;AAAA,MACrB,KAAK,WAAY,CAAA;AAAA,KACnB;AAEA,IAAI,IAAA,QAAA;AACJ,IAAI,IAAA;AACF,MAAA,QAAA,GAAW,MAAM,KAAA,CAAM,YAAa,CAAA,QAAA,EAAY,EAAA;AAAA,QAC9C,OAAS,EAAA;AAAA,UACP,GAAG,cAAe,CAAA,OAAA;AAAA,UAClB,GAAI,IAAA,IAAQ,EAAE,eAAA,EAAiB,IAAK,EAAA;AAAA,UACpC,GAAI,iBAAqB,IAAA;AAAA,YACvB,mBAAA,EAAqB,kBAAkB,WAAY;AAAA;AACrD,SACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOA,GAAI,MAAU,IAAA,EAAE,MAAsB;AAAA,OACvC,CAAA;AAAA,aACM,CAAG,EAAA;AACV,MAAA,MAAM,IAAI,KAAM,CAAA,CAAA,eAAA,EAAkB,GAAG,CAAA,EAAA,EAAK,CAAC,CAAE,CAAA,CAAA;AAAA;AAG/C,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAA,MAAM,IAAIC,uBAAiB,EAAA;AAAA;AAG7B,IAAA,IAAI,SAAS,EAAI,EAAA;AACf,MAAO,OAAAC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA;AAGrD,IAAM,MAAA,OAAA,GAAU,CAAG,EAAA,GAAG,CAAyB,sBAAA,EAAA,YAAY,KAAK,QAAS,CAAA,MAAM,CAAI,CAAA,EAAA,QAAA,CAAS,UAAU,CAAA,CAAA;AACtG,IAAI,IAAA,QAAA,CAAS,WAAW,GAAK,EAAA;AAC3B,MAAM,MAAA,IAAIC,qBAAc,OAAO,CAAA;AAAA;AAEjC,IAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AACzB,EAEA,MAAM,QACJ,CAAA,GAAA,EACA,OAC2C,EAAA;AAC3C,IAAA,MAAM,EAAE,QAAA,EAAa,GAAAC,4BAAA,CAAY,GAAG,CAAA;AAEpC,IAAA,MAAM,mBAAsB,GAAA,MAAM,IAAK,CAAA,sBAAA,CAAuB,GAAG,CAAA;AACjE,IAAA,IAAI,OAAS,EAAA,IAAA,IAAQ,OAAQ,CAAA,IAAA,KAAS,mBAAqB,EAAA;AACzD,MAAA,MAAM,IAAIH,uBAAiB,EAAA;AAAA;AAG7B,IAAA,MAAM,cAAc,MAAMI,wCAAA;AAAA,MACxB,GAAA;AAAA,MACA,KAAK,WAAY,CAAA;AAAA,KACnB;AACA,IAAA,MAAM,kBAAkB,MAAM,KAAA;AAAA,MAC5B,WAAA;AAAA,MACAL,2CAAA,CAAgC,IAAK,CAAA,WAAA,CAAY,MAAM;AAAA,KACzD;AACA,IAAI,IAAA,CAAC,gBAAgB,EAAI,EAAA;AACvB,MAAM,MAAA,OAAA,GAAU,4BAA4B,GAAG,CAAA,EAAA,EAAK,gBAAgB,MAAM,CAAA,CAAA,EAAI,gBAAgB,UAAU,CAAA,CAAA;AACxG,MAAI,IAAA,eAAA,CAAgB,WAAW,GAAK,EAAA;AAClC,QAAM,MAAA,IAAIG,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAA,OAAO,MAAM,IAAA,CAAK,IAAK,CAAA,mBAAA,CAAoB,cAAe,CAAA;AAAA,MACxD,QAAU,EAAA,eAAA;AAAA,MACV,OAAS,EAAA,QAAA;AAAA,MACT,IAAM,EAAA,mBAAA;AAAA,MACN,QAAQ,OAAS,EAAA;AAAA,KAClB,CAAA;AAAA;AACH,EAEA,MAAM,MACJ,CAAA,GAAA,EACA,OACyC,EAAA;AACzC,IAAA,MAAM,EAAE,QAAA,EAAa,GAAAC,4BAAA,CAAY,GAAG,CAAA;AACpC,IAAM,MAAA,OAAA,GAAU,IAAIE,mBAAA,CAAU,QAAQ,CAAA;AAMtC,IAAA,MAAM,UAAUC,cAAQ,CAAA,GAAA,CAAI,QAAQ,QAAU,EAAA,EAAE,GAAG,GAAG,CAAA;AAEtD,IAAA,MAAM,IAAO,GAAA,MAAM,IAAK,CAAA,QAAA,CAAS,OAAS,EAAA;AAAA,MACxC,MAAM,OAAS,EAAA,IAAA;AAAA,MACf,MAAQ,EAAA,C
AAA,IAAA,KAAQ,OAAQ,CAAA,KAAA,CAAM,IAAI;AAAA,KACnC,CAAA;AACD,IAAM,MAAA,KAAA,GAAQ,MAAM,IAAA,CAAK,KAAM,EAAA;AAE/B,IAAO,OAAA;AAAA,MACL,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,KAAA,EAAO,KAAM,CAAA,GAAA,CAAI,CAAS,IAAA,MAAA;AAAA,QACxB,GAAA,EAAK,IAAK,CAAA,WAAA,CAAY,UAAW,CAAA;AAAA,UAC/B,GAAA,EAAK,CAAI,CAAA,EAAA,IAAA,CAAK,IAAI,CAAA,CAAA;AAAA,UAClB,IAAM,EAAA;AAAA,SACP,CAAA;AAAA,QACD,SAAS,IAAK,CAAA,OAAA;AAAA,QACd,gBAAgB,IAAK,CAAA;AAAA,OACrB,CAAA;AAAA,KACJ;AAAA;AACF,EAEA,QAAW,GAAA;AACT,IAAA,MAAM,EAAE,IAAM,EAAA,QAAA,EAAU,WAAY,EAAA,GAAI,KAAK,WAAY,CAAA,MAAA;AACzD,IAAM,MAAA,MAAA,GAAS,OAAQ,CAAA,QAAA,IAAY,WAAW,CAAA;AAC9C,IAAO,OAAA,CAAA,oBAAA,EAAuB,IAAI,CAAA,QAAA,EAAW,MAAM,CAAA,CAAA,CAAA;AAAA;AACrD,EAEA,MAAc,uBAAuB,GAA8B,EAAA;AACjE,IAAM,MAAA,EAAE,MAAM,QAAU,EAAA,KAAA,EAAO,SAAS,GAAI,EAAA,GAAIH,6BAAY,GAAG,CAAA;AAE/D,IAAA,IAAI,MAAS,GAAA,GAAA;AACb,IAAA,IAAI,CAAC,MAAQ,EAAA;AACX,MAAA,MAAA,GAAS,MAAMI,0CAAA;AAAA,QACb,GAAA;AAAA,QACA,KAAK,WAAY,CAAA;AAAA,OACnB;AAAA;AAGF,IAAM,MAAA,aAAA,GAAgB,CAAG,EAAA,IAAA,CAAK,WAAY,CAAA,MAAA,CAAO,UAAU,CAAA,cAAA,EAAiB,OAAO,CAAA,CAAA,EAAI,QAAQ,CAAA,SAAA,EAAY,MAAM,CAAA,CAAA;AAEjH,IAAA,MAAM,kBAAkB,MAAM,KAAA;AAAA,MAC5B,aAAA;AAAA,MACAR,2CAAA,CAAgC,IAAK,CAAA,WAAA,CAAY,MAAM;AAAA,KACzD;AACA,IAAI,IAAA,CAAC,gBAAgB,EAAI,EAAA;AACvB,MAAM,MAAA,OAAA,GAAU,mCAAmC,aAAa,CAAA,EAAA,EAAK,gBAAgB,MAAM,CAAA,CAAA,EAAI,gBAAgB,UAAU,CAAA,CAAA;AACzH,MAAI,IAAA,eAAA,CAAgB,WAAW,GAAK,EAAA;AAClC,QAAM,MAAA,IAAIG,qBAAc,OAAO,CAAA;AAAA;AAEjC,MAAM,MAAA,IAAI,MAAM,OAAO,CAAA;AAAA;AAGzB,IAAM,MAAA,OAAA,GAAU,MAAM,eAAA,CAAgB,IAAK,EAAA;AAC3C,IACE,IAAA,OAAA,IACA,OAAQ,CAAA,MAAA,IACR,OAAQ,CAAA,MAAA,CAAO,MAAS,GAAA,CAAA,IACxB,OAAQ,CAAA,MAAA,CAAO,CAAC,CAAA,CAAE,IAClB,EAAA;AACA,MAAA,OAAO,QAAQ,MAAO,CAAA,CAAC,EAAE,IAAK,CAAA,SAAA,CAAU,GAAG,EAAE,CAAA;AAAA;AAG/C,IAAA,MAAM,IAAI,KAAA,CAAM,CAAgC,6BAAA,EAAA,aAAa,CAAE,CAAA,CAAA;AAAA;AAEnE;;;;"}
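The Bitbucket Cloud source embedded in the map above also keeps using the newest commit's twelve-character short hash as the readTree etag. A hedged sketch of that check; the URL shape and JSON fields follow the diff, and integration credentials are omitted:

```ts
// Sketch only: derive an etag from the newest commit on a branch and signal
// "not modified" when it matches the caller's previous etag.
import { NotModifiedError } from '@backstage/errors';

async function latestCommitEtag(
  apiBaseUrl: string,
  project: string,
  repoName: string,
  branch: string,
  previousEtag?: string,
): Promise<string> {
  const commitsApiUrl = `${apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;
  const response = await fetch(commitsApiUrl);
  if (!response.ok) {
    throw new Error(
      `Failed to retrieve commits from ${commitsApiUrl}, ${response.status} ${response.statusText}`,
    );
  }
  const commits = (await response.json()) as { values?: { hash?: string }[] };
  const hash = commits.values?.[0]?.hash;
  if (!hash) {
    throw new Error(`Failed to read response from ${commitsApiUrl}`);
  }
  const shortHash = hash.substring(0, 12);
  if (previousEtag && previousEtag === shortHash) {
    throw new NotModifiedError(); // tree unchanged since the caller's etag
  }
  return shortHash;
}
```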