@backstage/backend-defaults 0.12.1-next.1 → 0.12.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,33 @@
1
1
  # @backstage/backend-defaults
2
2
 
3
+ ## 0.12.2
4
+
5
+ ### Patch Changes
6
+
7
+ - Backport security fixes
8
+
9
+ ## 0.12.1
10
+
11
+ ### Patch Changes
12
+
13
+ - 33bd4d0: Deduplicate features discovered with discoveryFeatureLoader
14
+ - 4eda590: Fixed cache namespace and key prefix separator configuration to properly use configured values instead of hardcoded plugin ID. The cache manager now correctly combines the configured namespace with plugin IDs using the configured separator for Redis and Valkey. Memcache and memory store continue to use plugin ID as namespace.
15
+ - f244e61: Add `backend.logger` config options to configure the `RootLoggerService`.
16
+
17
+ Read more about the new configuration options in the
18
+ [Root Logger Service](https://backstage.io/docs/backend-system/core-services/root-logger/)
19
+ documentation.
20
+
21
+ - Updated dependencies
22
+ - @backstage/config-loader@1.10.3
23
+ - @backstage/plugin-auth-node@0.6.7
24
+ - @backstage/plugin-events-node@0.4.15
25
+ - @backstage/integration@1.18.0
26
+ - @backstage/types@1.2.2
27
+ - @backstage/backend-app-api@1.2.7
28
+ - @backstage/backend-plugin-api@1.4.3
29
+ - @backstage/plugin-permission-node@0.10.4
30
+
3
31
  ## 0.12.1-next.1
4
32
 
5
33
  ### Patch Changes
package/config.d.ts CHANGED
@@ -14,7 +14,7 @@
14
14
  * limitations under the License.
15
15
  */
16
16
 
17
- import { HumanDuration } from '@backstage/types';
17
+ import { HumanDuration, JsonObject } from '@backstage/types';
18
18
 
19
19
  export interface Config {
20
20
  app: {
@@ -985,6 +985,66 @@ export interface Config {
985
985
  headers?: { [name: string]: string };
986
986
  };
987
987
 
988
+ /**
989
+ * Options to configure the default RootLoggerService.
990
+ */
991
+ logger?: {
992
+ /**
993
+ * Configures the global log level for messages.
994
+ *
995
+ * This can also be configured using the LOG_LEVEL environment variable, which
996
+ * takes precedence over this configuration.
997
+ *
998
+ * Defaults to 'info'.
999
+ */
1000
+ level?: 'debug' | 'info' | 'warn' | 'error';
1001
+
1002
+ /**
1003
+ * Additional metadata to include with every log entry.
1004
+ */
1005
+ meta?: JsonObject;
1006
+
1007
+ /**
1008
+ * List of logger overrides.
1009
+ *
1010
+ * Can be used to configure a different level for logs matching certain criteria.
1011
+ * For example, it can be used to ignore 'info' logs of given plugins.
1012
+ *
1013
+ * @example
1014
+ *
1015
+ * ```yaml
1016
+ * logger:
1017
+ * level: info
1018
+ * overrides:
1019
+ * # For catalog and auth plugins, messages less important than 'warn' will be ignored.
1020
+ * - matchers:
1021
+ * plugin: [catalog, auth]
1022
+ * level: warn
1023
+ * # Ignore all messages that start with 'Forget'
1024
+ * - matchers:
1025
+ * message: '/^Forget/'
1026
+ * level: warn
1027
+ * ```
1028
+ */
1029
+ overrides?: Array<{
1030
+ /**
1031
+ * Conditions that must be met to override the log level.
1032
+ *
1033
+ * A matcher can be:
1034
+ *
1035
+ * - A string (exact match or regex pattern delimited by slashes, e.g. `/pattern/`)
1036
+ * - A non-string value (compared by strict equality)
1037
+ * - An array of matchers (returns true if any matcher matches)
1038
+ */
1039
+ matchers: JsonObject;
1040
+
1041
+ /**
1042
+ * Log level to use for matched entries.
1043
+ */
1044
+ level: 'debug' | 'info' | 'warn' | 'error';
1045
+ }>;
1046
+ };
1047
+
988
1048
  /**
989
1049
  * Rate limiting options. Defining this as `true` will enable rate limiting with default values.
990
1050
  */
@@ -111,7 +111,7 @@ class PackageDiscoveryService {
111
111
  }
112
112
  }
113
113
  }
114
- return { features };
114
+ return { features: Array.from(new Set(features)) };
115
115
  }
116
116
  }
117
117
 
@@ -1 +1 @@
1
- {"version":3,"file":"PackageDiscoveryService.cjs.js","sources":["../src/PackageDiscoveryService.ts"],"sourcesContent":["/*\n * Copyright 2024 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport fs from 'fs-extra';\nimport { resolve as resolvePath, dirname } from 'path';\n\nimport {\n BackendFeature,\n RootConfigService,\n RootLoggerService,\n} from '@backstage/backend-plugin-api';\nimport { BackstagePackageJson } from '@backstage/cli-node';\nimport { isError } from '@backstage/errors';\n\nconst DETECTED_PACKAGE_ROLES = [\n 'node-library',\n 'backend',\n 'backend-plugin',\n 'backend-plugin-module',\n];\n\n/** @internal */\nfunction isBackendFeature(value: unknown): value is BackendFeature {\n return (\n !!value &&\n ['object', 'function'].includes(typeof value) &&\n (value as BackendFeature).$$type === '@backstage/BackendFeature'\n );\n}\n\n/** @internal */\nfunction isBackendFeatureFactory(\n value: unknown,\n): value is () => BackendFeature {\n return (\n !!value &&\n typeof value === 'function' &&\n (value as any).$$type === '@backstage/BackendFeatureFactory'\n );\n}\n\n/** @internal */\nasync function findClosestPackageDir(\n searchDir: string,\n): Promise<string | undefined> {\n let path = searchDir;\n\n // Some confidence check to avoid infinite loop\n for (let i = 0; i < 1000; i++) {\n const packagePath = resolvePath(path, 'package.json');\n const exists = await fs.pathExists(packagePath);\n if (exists) {\n 
return path;\n }\n\n const newPath = dirname(path);\n if (newPath === path) {\n return undefined;\n }\n path = newPath;\n }\n\n throw new Error(\n `Iteration limit reached when searching for root package.json at ${searchDir}`,\n );\n}\n\n/** @internal */\nexport class PackageDiscoveryService {\n constructor(\n private readonly config: RootConfigService,\n private readonly logger: RootLoggerService,\n ) {}\n\n getDependencyNames(path: string) {\n const { dependencies } = require(path) as BackstagePackageJson;\n const packagesConfig = this.config.getOptional('backend.packages');\n\n const dependencyNames = Object.keys(dependencies || {});\n\n if (packagesConfig === 'all') {\n return dependencyNames;\n }\n\n const includedPackagesConfig = this.config.getOptionalStringArray(\n 'backend.packages.include',\n );\n\n const includedPackages = includedPackagesConfig\n ? new Set(includedPackagesConfig)\n : dependencyNames;\n const excludedPackagesSet = new Set(\n this.config.getOptionalStringArray('backend.packages.exclude'),\n );\n\n return [...includedPackages].filter(name => !excludedPackagesSet.has(name));\n }\n\n async getBackendFeatures(): Promise<{ features: Array<BackendFeature> }> {\n const packagesConfig = this.config.getOptional('backend.packages');\n if (!packagesConfig || Object.keys(packagesConfig).length === 0) {\n return { features: [] };\n }\n\n const packageDir = await findClosestPackageDir(process.argv[1]);\n if (!packageDir) {\n throw new Error('Package discovery failed to find package.json');\n }\n const dependencyNames = this.getDependencyNames(\n resolvePath(packageDir, 'package.json'),\n );\n\n const features: BackendFeature[] = [];\n\n for (const name of dependencyNames) {\n let depPkg: BackstagePackageJson;\n try {\n const packageJsonPath = require.resolve(`${name}/package.json`, {\n paths: [packageDir],\n });\n depPkg = require(packageJsonPath) as BackstagePackageJson;\n } catch (error) {\n // Handle packages with \"exports\" field that don't export 
./package.json\n if (isError(error) && error.code === 'ERR_PACKAGE_PATH_NOT_EXPORTED') {\n continue; // Skip packages that don't export package.json - they can't be Backstage packages\n }\n throw error;\n }\n if (\n !depPkg?.backstage?.role ||\n !DETECTED_PACKAGE_ROLES.includes(depPkg.backstage.role)\n ) {\n continue; // Not a backstage backend package, ignore\n }\n\n const exportedModulePaths = [\n require.resolve(name, {\n paths: [packageDir],\n }),\n ];\n\n // Find modules exported as alpha\n try {\n exportedModulePaths.push(\n require.resolve(`${name}/alpha`, { paths: [packageDir] }),\n );\n } catch {\n /* ignore */\n }\n\n for (const modulePath of exportedModulePaths) {\n const mod = require(modulePath);\n\n if (isBackendFeature(mod.default)) {\n this.logger.info(`Detected: ${name}`);\n features.push(mod.default);\n }\n if (isBackendFeatureFactory(mod.default)) {\n this.logger.info(`Detected: ${name}`);\n features.push(mod.default());\n }\n }\n }\n\n return { features };\n }\n}\n"],"names":["resolvePath","fs","dirname","isError"],"mappings":";;;;;;;;;;AA2BA,MAAM,sBAAA,GAAyB;AAAA,EAC7B,cAAA;AAAA,EACA,SAAA;AAAA,EACA,gBAAA;AAAA,EACA;AACF,CAAA;AAGA,SAAS,iBAAiB,KAAA,EAAyC;AACjE,EAAA,OACE,CAAC,CAAC,KAAA,IACF,CAAC,QAAA,EAAU,UAAU,CAAA,CAAE,QAAA,CAAS,OAAO,KAAK,CAAA,IAC3C,KAAA,CAAyB,MAAA,KAAW,2BAAA;AAEzC;AAGA,SAAS,wBACP,KAAA,EAC+B;AAC/B,EAAA,OACE,CAAC,CAAC,KAAA,IACF,OAAO,KAAA,KAAU,UAAA,IAChB,MAAc,MAAA,KAAW,kCAAA;AAE9B;AAGA,eAAe,sBACb,SAAA,EAC6B;AAC7B,EAAA,IAAI,IAAA,GAAO,SAAA;AAGX,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,IAAA,MAAM,WAAA,GAAcA,oBAAA,CAAY,IAAA,EAAM,cAAc,CAAA;AACpD,IAAA,MAAM,MAAA,GAAS,MAAMC,mBAAA,CAAG,UAAA,CAAW,WAAW,CAAA;AAC9C,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,MAAM,OAAA,GAAUC,qBAAQ,IAAI,CAAA;AAC5B,IAAA,IAAI,YAAY,IAAA,EAAM;AACpB,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,IAAA,GAAO,OAAA;AAAA,EACT;AAEA,EAAA,MAAM,IAAI,KAAA;AAAA,IACR,mEAAmE,SAAS,CAAA;AAAA,GAC9E;AACF;AAGO,MAAM,uBAAA,CAAwB;AAAA,EACnC,WAAA,CACmB,QA
CA,MAAA,EACjB;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAAA,EAChB;AAAA,EAEH,mBAAmB,IAAA,EAAc;AAC/B,IAAA,MAAM,EAAE,YAAA,EAAa,GAAI,OAAA,CAAQ,IAAI,CAAA;AACrC,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,WAAA,CAAY,kBAAkB,CAAA;AAEjE,IAAA,MAAM,eAAA,GAAkB,MAAA,CAAO,IAAA,CAAK,YAAA,IAAgB,EAAE,CAAA;AAEtD,IAAA,IAAI,mBAAmB,KAAA,EAAO;AAC5B,MAAA,OAAO,eAAA;AAAA,IACT;AAEA,IAAA,MAAM,sBAAA,GAAyB,KAAK,MAAA,CAAO,sBAAA;AAAA,MACzC;AAAA,KACF;AAEA,IAAA,MAAM,gBAAA,GAAmB,sBAAA,GACrB,IAAI,GAAA,CAAI,sBAAsB,CAAA,GAC9B,eAAA;AACJ,IAAA,MAAM,sBAAsB,IAAI,GAAA;AAAA,MAC9B,IAAA,CAAK,MAAA,CAAO,sBAAA,CAAuB,0BAA0B;AAAA,KAC/D;AAEA,IAAA,OAAO,CAAC,GAAG,gBAAgB,CAAA,CAAE,MAAA,CAAO,UAAQ,CAAC,mBAAA,CAAoB,GAAA,CAAI,IAAI,CAAC,CAAA;AAAA,EAC5E;AAAA,EAEA,MAAM,kBAAA,GAAmE;AACvE,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,WAAA,CAAY,kBAAkB,CAAA;AACjE,IAAA,IAAI,CAAC,cAAA,IAAkB,MAAA,CAAO,KAAK,cAAc,CAAA,CAAE,WAAW,CAAA,EAAG;AAC/D,MAAA,OAAO,EAAE,QAAA,EAAU,EAAC,EAAE;AAAA,IACxB;AAEA,IAAA,MAAM,aAAa,MAAM,qBAAA,CAAsB,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAC,CAAA;AAC9D,IAAA,IAAI,CAAC,UAAA,EAAY;AACf,MAAA,MAAM,IAAI,MAAM,+CAA+C,CAAA;AAAA,IACjE;AACA,IAAA,MAAM,kBAAkB,IAAA,CAAK,kBAAA;AAAA,MAC3BF,oBAAA,CAAY,YAAY,cAAc;AAAA,KACxC;AAEA,IAAA,MAAM,WAA6B,EAAC;AAEpC,IAAA,KAAA,MAAW,QAAQ,eAAA,EAAiB;AAClC,MAAA,IAAI,MAAA;AACJ,MAAA,IAAI;AACF,QAAA,MAAM,eAAA,GAAkB,OAAA,CAAQ,OAAA,CAAQ,CAAA,EAAG,IAAI,CAAA,aAAA,CAAA,EAAiB;AAAA,UAC9D,KAAA,EAAO,CAAC,UAAU;AAAA,SACnB,CAAA;AACD,QAAA,MAAA,GAAS,QAAQ,eAAe,CAAA;AAAA,MAClC,SAAS,KAAA,EAAO;AAEd,QAAA,IAAIG,cAAA,CAAQ,KAAK,CAAA,IAAK,KAAA,CAAM,SAAS,+BAAA,EAAiC;AACpE,UAAA;AAAA,QACF;AACA,QAAA,MAAM,KAAA;AAAA,MACR;AACA,MAAA,IACE,CAAC,MAAA,EAAQ,SAAA,EAAW,IAAA,IACpB,CAAC,uBAAuB,QAAA,CAAS,MAAA,CAAO,SAAA,CAAU,IAAI,CAAA,EACtD;AACA,QAAA;AAAA,MACF;AAEA,MAAA,MAAM,mBAAA,GAAsB;AAAA,QAC1B,OAAA,CAAQ,QAAQ,IAAA,EAAM;AAAA,UACpB,KAAA,EAAO,CAAC,UAAU;AAAA,SACnB;AAAA,OACH;AAGA,MAAA,IAAI;AACF,QAAA,mBAAA,CAAoB,IAAA;AAAA,UAClB,OAAA,CAAQ,OAAA,CAAQ,CAAA,EAAG,IAAI,CAAA,MAAA,CAAA,EAAU,EAAE,KAAA,EAAO,CAAC,UAAU,CAAA,EAAG;AAAA,SAC1D;AAAA,MA
CF,CAAA,CAAA,MAAQ;AAAA,MAER;AAEA,MAAA,KAAA,MAAW,cAAc,mBAAA,EAAqB;AAC5C,QAAA,MAAM,GAAA,GAAM,QAAQ,UAAU,CAAA;AAE9B,QAAA,IAAI,gBAAA,CAAiB,GAAA,CAAI,OAAO,CAAA,EAAG;AACjC,UAAA,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,CAAA,UAAA,EAAa,IAAI,CAAA,CAAE,CAAA;AACpC,UAAA,QAAA,CAAS,IAAA,CAAK,IAAI,OAAO,CAAA;AAAA,QAC3B;AACA,QAAA,IAAI,uBAAA,CAAwB,GAAA,CAAI,OAAO,CAAA,EAAG;AACxC,UAAA,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,CAAA,UAAA,EAAa,IAAI,CAAA,CAAE,CAAA;AACpC,UAAA,QAAA,CAAS,IAAA,CAAK,GAAA,CAAI,OAAA,EAAS,CAAA;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,EAAE,QAAA,EAAS;AAAA,EACpB;AACF;;;;"}
1
+ {"version":3,"file":"PackageDiscoveryService.cjs.js","sources":["../src/PackageDiscoveryService.ts"],"sourcesContent":["/*\n * Copyright 2024 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport fs from 'fs-extra';\nimport { resolve as resolvePath, dirname } from 'path';\n\nimport {\n BackendFeature,\n RootConfigService,\n RootLoggerService,\n} from '@backstage/backend-plugin-api';\nimport { BackstagePackageJson } from '@backstage/cli-node';\nimport { isError } from '@backstage/errors';\n\nconst DETECTED_PACKAGE_ROLES = [\n 'node-library',\n 'backend',\n 'backend-plugin',\n 'backend-plugin-module',\n];\n\n/** @internal */\nfunction isBackendFeature(value: unknown): value is BackendFeature {\n return (\n !!value &&\n ['object', 'function'].includes(typeof value) &&\n (value as BackendFeature).$$type === '@backstage/BackendFeature'\n );\n}\n\n/** @internal */\nfunction isBackendFeatureFactory(\n value: unknown,\n): value is () => BackendFeature {\n return (\n !!value &&\n typeof value === 'function' &&\n (value as any).$$type === '@backstage/BackendFeatureFactory'\n );\n}\n\n/** @internal */\nasync function findClosestPackageDir(\n searchDir: string,\n): Promise<string | undefined> {\n let path = searchDir;\n\n // Some confidence check to avoid infinite loop\n for (let i = 0; i < 1000; i++) {\n const packagePath = resolvePath(path, 'package.json');\n const exists = await fs.pathExists(packagePath);\n if (exists) {\n 
return path;\n }\n\n const newPath = dirname(path);\n if (newPath === path) {\n return undefined;\n }\n path = newPath;\n }\n\n throw new Error(\n `Iteration limit reached when searching for root package.json at ${searchDir}`,\n );\n}\n\n/** @internal */\nexport class PackageDiscoveryService {\n constructor(\n private readonly config: RootConfigService,\n private readonly logger: RootLoggerService,\n ) {}\n\n getDependencyNames(path: string) {\n const { dependencies } = require(path) as BackstagePackageJson;\n const packagesConfig = this.config.getOptional('backend.packages');\n\n const dependencyNames = Object.keys(dependencies || {});\n\n if (packagesConfig === 'all') {\n return dependencyNames;\n }\n\n const includedPackagesConfig = this.config.getOptionalStringArray(\n 'backend.packages.include',\n );\n\n const includedPackages = includedPackagesConfig\n ? new Set(includedPackagesConfig)\n : dependencyNames;\n const excludedPackagesSet = new Set(\n this.config.getOptionalStringArray('backend.packages.exclude'),\n );\n\n return [...includedPackages].filter(name => !excludedPackagesSet.has(name));\n }\n\n async getBackendFeatures(): Promise<{ features: Array<BackendFeature> }> {\n const packagesConfig = this.config.getOptional('backend.packages');\n if (!packagesConfig || Object.keys(packagesConfig).length === 0) {\n return { features: [] };\n }\n\n const packageDir = await findClosestPackageDir(process.argv[1]);\n if (!packageDir) {\n throw new Error('Package discovery failed to find package.json');\n }\n const dependencyNames = this.getDependencyNames(\n resolvePath(packageDir, 'package.json'),\n );\n\n const features: BackendFeature[] = [];\n\n for (const name of dependencyNames) {\n let depPkg: BackstagePackageJson;\n try {\n const packageJsonPath = require.resolve(`${name}/package.json`, {\n paths: [packageDir],\n });\n depPkg = require(packageJsonPath) as BackstagePackageJson;\n } catch (error) {\n // Handle packages with \"exports\" field that don't export 
./package.json\n if (isError(error) && error.code === 'ERR_PACKAGE_PATH_NOT_EXPORTED') {\n continue; // Skip packages that don't export package.json - they can't be Backstage packages\n }\n throw error;\n }\n if (\n !depPkg?.backstage?.role ||\n !DETECTED_PACKAGE_ROLES.includes(depPkg.backstage.role)\n ) {\n continue; // Not a backstage backend package, ignore\n }\n\n const exportedModulePaths = [\n require.resolve(name, {\n paths: [packageDir],\n }),\n ];\n\n // Find modules exported as alpha\n try {\n exportedModulePaths.push(\n require.resolve(`${name}/alpha`, { paths: [packageDir] }),\n );\n } catch {\n /* ignore */\n }\n\n for (const modulePath of exportedModulePaths) {\n const mod = require(modulePath);\n\n if (isBackendFeature(mod.default)) {\n this.logger.info(`Detected: ${name}`);\n features.push(mod.default);\n }\n if (isBackendFeatureFactory(mod.default)) {\n this.logger.info(`Detected: ${name}`);\n features.push(mod.default());\n }\n }\n }\n\n return { features: Array.from(new Set(features)) };\n 
}\n}\n"],"names":["resolvePath","fs","dirname","isError"],"mappings":";;;;;;;;;;AA2BA,MAAM,sBAAA,GAAyB;AAAA,EAC7B,cAAA;AAAA,EACA,SAAA;AAAA,EACA,gBAAA;AAAA,EACA;AACF,CAAA;AAGA,SAAS,iBAAiB,KAAA,EAAyC;AACjE,EAAA,OACE,CAAC,CAAC,KAAA,IACF,CAAC,QAAA,EAAU,UAAU,CAAA,CAAE,QAAA,CAAS,OAAO,KAAK,CAAA,IAC3C,KAAA,CAAyB,MAAA,KAAW,2BAAA;AAEzC;AAGA,SAAS,wBACP,KAAA,EAC+B;AAC/B,EAAA,OACE,CAAC,CAAC,KAAA,IACF,OAAO,KAAA,KAAU,UAAA,IAChB,MAAc,MAAA,KAAW,kCAAA;AAE9B;AAGA,eAAe,sBACb,SAAA,EAC6B;AAC7B,EAAA,IAAI,IAAA,GAAO,SAAA;AAGX,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,IAAA,MAAM,WAAA,GAAcA,oBAAA,CAAY,IAAA,EAAM,cAAc,CAAA;AACpD,IAAA,MAAM,MAAA,GAAS,MAAMC,mBAAA,CAAG,UAAA,CAAW,WAAW,CAAA;AAC9C,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,MAAM,OAAA,GAAUC,qBAAQ,IAAI,CAAA;AAC5B,IAAA,IAAI,YAAY,IAAA,EAAM;AACpB,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,IAAA,GAAO,OAAA;AAAA,EACT;AAEA,EAAA,MAAM,IAAI,KAAA;AAAA,IACR,mEAAmE,SAAS,CAAA;AAAA,GAC9E;AACF;AAGO,MAAM,uBAAA,CAAwB;AAAA,EACnC,WAAA,CACmB,QACA,MAAA,EACjB;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAAA,EAChB;AAAA,EAEH,mBAAmB,IAAA,EAAc;AAC/B,IAAA,MAAM,EAAE,YAAA,EAAa,GAAI,OAAA,CAAQ,IAAI,CAAA;AACrC,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,WAAA,CAAY,kBAAkB,CAAA;AAEjE,IAAA,MAAM,eAAA,GAAkB,MAAA,CAAO,IAAA,CAAK,YAAA,IAAgB,EAAE,CAAA;AAEtD,IAAA,IAAI,mBAAmB,KAAA,EAAO;AAC5B,MAAA,OAAO,eAAA;AAAA,IACT;AAEA,IAAA,MAAM,sBAAA,GAAyB,KAAK,MAAA,CAAO,sBAAA;AAAA,MACzC;AAAA,KACF;AAEA,IAAA,MAAM,gBAAA,GAAmB,sBAAA,GACrB,IAAI,GAAA,CAAI,sBAAsB,CAAA,GAC9B,eAAA;AACJ,IAAA,MAAM,sBAAsB,IAAI,GAAA;AAAA,MAC9B,IAAA,CAAK,MAAA,CAAO,sBAAA,CAAuB,0BAA0B;AAAA,KAC/D;AAEA,IAAA,OAAO,CAAC,GAAG,gBAAgB,CAAA,CAAE,MAAA,CAAO,UAAQ,CAAC,mBAAA,CAAoB,GAAA,CAAI,IAAI,CAAC,CAAA;AAAA,EAC5E;AAAA,EAEA,MAAM,kBAAA,GAAmE;AACvE,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,WAAA,CAAY,kBAAkB,CAAA;AACjE,IAAA,IAAI,CAAC,cAAA,IAAkB,MAAA,CAAO,KAAK,cAAc,CAAA,CAAE,WAAW,CAAA,EAAG;AAC/D,MAAA,OAAO,EAAE,QAAA,EAAU,EAAC,EAAE;AAAA,IACxB;AAEA,IAAA,MAAM,aAAa,MAAM,qBAAA,CAAsB,OA
AA,CAAQ,IAAA,CAAK,CAAC,CAAC,CAAA;AAC9D,IAAA,IAAI,CAAC,UAAA,EAAY;AACf,MAAA,MAAM,IAAI,MAAM,+CAA+C,CAAA;AAAA,IACjE;AACA,IAAA,MAAM,kBAAkB,IAAA,CAAK,kBAAA;AAAA,MAC3BF,oBAAA,CAAY,YAAY,cAAc;AAAA,KACxC;AAEA,IAAA,MAAM,WAA6B,EAAC;AAEpC,IAAA,KAAA,MAAW,QAAQ,eAAA,EAAiB;AAClC,MAAA,IAAI,MAAA;AACJ,MAAA,IAAI;AACF,QAAA,MAAM,eAAA,GAAkB,OAAA,CAAQ,OAAA,CAAQ,CAAA,EAAG,IAAI,CAAA,aAAA,CAAA,EAAiB;AAAA,UAC9D,KAAA,EAAO,CAAC,UAAU;AAAA,SACnB,CAAA;AACD,QAAA,MAAA,GAAS,QAAQ,eAAe,CAAA;AAAA,MAClC,SAAS,KAAA,EAAO;AAEd,QAAA,IAAIG,cAAA,CAAQ,KAAK,CAAA,IAAK,KAAA,CAAM,SAAS,+BAAA,EAAiC;AACpE,UAAA;AAAA,QACF;AACA,QAAA,MAAM,KAAA;AAAA,MACR;AACA,MAAA,IACE,CAAC,MAAA,EAAQ,SAAA,EAAW,IAAA,IACpB,CAAC,uBAAuB,QAAA,CAAS,MAAA,CAAO,SAAA,CAAU,IAAI,CAAA,EACtD;AACA,QAAA;AAAA,MACF;AAEA,MAAA,MAAM,mBAAA,GAAsB;AAAA,QAC1B,OAAA,CAAQ,QAAQ,IAAA,EAAM;AAAA,UACpB,KAAA,EAAO,CAAC,UAAU;AAAA,SACnB;AAAA,OACH;AAGA,MAAA,IAAI;AACF,QAAA,mBAAA,CAAoB,IAAA;AAAA,UAClB,OAAA,CAAQ,OAAA,CAAQ,CAAA,EAAG,IAAI,CAAA,MAAA,CAAA,EAAU,EAAE,KAAA,EAAO,CAAC,UAAU,CAAA,EAAG;AAAA,SAC1D;AAAA,MACF,CAAA,CAAA,MAAQ;AAAA,MAER;AAEA,MAAA,KAAA,MAAW,cAAc,mBAAA,EAAqB;AAC5C,QAAA,MAAM,GAAA,GAAM,QAAQ,UAAU,CAAA;AAE9B,QAAA,IAAI,gBAAA,CAAiB,GAAA,CAAI,OAAO,CAAA,EAAG;AACjC,UAAA,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,CAAA,UAAA,EAAa,IAAI,CAAA,CAAE,CAAA;AACpC,UAAA,QAAA,CAAS,IAAA,CAAK,IAAI,OAAO,CAAA;AAAA,QAC3B;AACA,QAAA,IAAI,uBAAA,CAAwB,GAAA,CAAI,OAAO,CAAA,EAAG;AACxC,UAAA,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,CAAA,UAAA,EAAa,IAAI,CAAA,CAAE,CAAA;AACpC,UAAA,QAAA,CAAS,IAAA,CAAK,GAAA,CAAI,OAAA,EAAS,CAAA;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,EAAE,UAAU,KAAA,CAAM,IAAA,CAAK,IAAI,GAAA,CAAI,QAAQ,CAAC,CAAA,EAAE;AAAA,EACnD;AACF;;;;"}
@@ -3,19 +3,26 @@
3
3
  var winston = require('winston');
4
4
  var tripleBeam = require('triple-beam');
5
5
  var escapeRegExp = require('../../lib/escapeRegExp.cjs.js');
6
+ var types = require('./types.cjs.js');
7
+ var utils = require('./utils.cjs.js');
6
8
 
7
9
  class WinstonLogger {
8
10
  #winston;
9
11
  #addRedactions;
12
+ #setLevelOverrides;
10
13
  /**
11
14
  * Creates a {@link WinstonLogger} instance.
12
15
  */
13
16
  static create(options) {
17
+ const defaultLogLevel = process.env.LOG_LEVEL || options.level || "info";
14
18
  const redacter = WinstonLogger.redacter();
19
+ const logLevelFilter = WinstonLogger.logLevelFilter(defaultLogLevel);
15
20
  const defaultFormatter = process.env.NODE_ENV === "production" ? winston.format.json() : WinstonLogger.colorFormat();
16
21
  let logger = winston.createLogger({
17
- level: process.env.LOG_LEVEL || options.level || "info",
22
+ // Lowest level possible as we let the logLevelFilter do the filtering
23
+ level: "silly",
18
24
  format: winston.format.combine(
25
+ logLevelFilter.format,
19
26
  options.format ?? defaultFormatter,
20
27
  redacter.format
21
28
  ),
@@ -24,7 +31,7 @@ class WinstonLogger {
24
31
  if (options.meta) {
25
32
  logger = logger.child(options.meta);
26
33
  }
27
- return new WinstonLogger(logger, redacter.add);
34
+ return new WinstonLogger(logger, redacter.add, logLevelFilter.setOverrides);
28
35
  }
29
36
  /**
30
37
  * Creates a winston log formatter for redacting secrets.
@@ -95,9 +102,39 @@ class WinstonLogger {
95
102
  })
96
103
  );
97
104
  }
98
- constructor(winston, addRedactions) {
105
+ /**
106
+ * Formatter that filters log levels using overrides, falling back to the default level when no criteria match.
107
+ */
108
+ static logLevelFilter(defaultLogLevel) {
109
+ const overrides = [];
110
+ return {
111
+ format: winston.format((log) => {
112
+ for (const override of overrides) {
113
+ if (override.predicate(log)) {
114
+ if (types.winstonLevels[log.level] > types.winstonLevels[override.level]) {
115
+ return false;
116
+ }
117
+ return log;
118
+ }
119
+ }
120
+ if (types.winstonLevels[log.level] > types.winstonLevels[defaultLogLevel]) {
121
+ return false;
122
+ }
123
+ return log;
124
+ })(),
125
+ setOverrides: (newOverrides) => {
126
+ const newOverridesPredicates = newOverrides.map((o) => ({
127
+ predicate: utils.createLogMatcher(o.matchers),
128
+ level: o.level
129
+ }));
130
+ overrides.splice(0, overrides.length, ...newOverridesPredicates);
131
+ }
132
+ };
133
+ }
134
+ constructor(winston, addRedactions, setLevelOverrides) {
99
135
  this.#winston = winston;
100
136
  this.#addRedactions = addRedactions;
137
+ this.#setLevelOverrides = setLevelOverrides;
101
138
  }
102
139
  error(message, meta) {
103
140
  this.#winston.error(message, meta);
@@ -117,6 +154,9 @@ class WinstonLogger {
117
154
  addRedactions(redactions) {
118
155
  this.#addRedactions?.(redactions);
119
156
  }
157
+ setLevelOverrides(overrides) {
158
+ this.#setLevelOverrides?.(overrides);
159
+ }
120
160
  }
121
161
 
122
162
  exports.WinstonLogger = WinstonLogger;
@@ -1 +1 @@
1
- {"version":3,"file":"WinstonLogger.cjs.js","sources":["../../../src/entrypoints/rootLogger/WinstonLogger.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n RootLoggerService,\n} from '@backstage/backend-plugin-api';\nimport { JsonObject } from '@backstage/types';\nimport { Format, TransformableInfo } from 'logform';\nimport {\n Logger,\n format,\n createLogger,\n transports,\n transport as Transport,\n} from 'winston';\nimport { MESSAGE } from 'triple-beam';\nimport { escapeRegExp } from '../../lib/escapeRegExp';\n\n/**\n * @public\n */\nexport interface WinstonLoggerOptions {\n meta?: JsonObject;\n level?: string;\n format?: Format;\n transports?: Transport[];\n}\n\n/**\n * A {@link @backstage/backend-plugin-api#LoggerService} implementation based on winston.\n *\n * @public\n */\nexport class WinstonLogger implements RootLoggerService {\n #winston: Logger;\n #addRedactions?: (redactions: Iterable<string>) => void;\n\n /**\n * Creates a {@link WinstonLogger} instance.\n */\n static create(options: WinstonLoggerOptions): WinstonLogger {\n const redacter = WinstonLogger.redacter();\n const defaultFormatter =\n process.env.NODE_ENV === 'production'\n ? format.json()\n : WinstonLogger.colorFormat();\n\n let logger = createLogger({\n level: process.env.LOG_LEVEL || options.level || 'info',\n format: format.combine(\n options.format ?? 
defaultFormatter,\n redacter.format,\n ),\n transports: options.transports ?? new transports.Console(),\n });\n\n if (options.meta) {\n logger = logger.child(options.meta);\n }\n\n return new WinstonLogger(logger, redacter.add);\n }\n\n /**\n * Creates a winston log formatter for redacting secrets.\n */\n static redacter(): {\n format: Format;\n add: (redactions: Iterable<string>) => void;\n } {\n const redactionSet = new Set<string>();\n\n let redactionPattern: RegExp | undefined = undefined;\n\n return {\n format: format((obj: TransformableInfo) => {\n if (!redactionPattern || !obj) {\n return obj;\n }\n\n obj[MESSAGE] = obj[MESSAGE]?.replace?.(redactionPattern, '***');\n\n return obj;\n })(),\n add(newRedactions) {\n let added = 0;\n for (const redactionToTrim of newRedactions) {\n // Skip null or undefined values\n if (redactionToTrim === null || redactionToTrim === undefined) {\n continue;\n }\n // Trimming the string ensures that we don't accdentally get extra\n // newlines or other whitespace interfering with the redaction; this\n // can happen for example when using string literals in yaml\n const redaction = redactionToTrim.trim();\n // Exclude secrets that are empty or just one character in length. 
These\n // typically mean that you are running local dev or tests, or using the\n // --lax flag which sets things to just 'x'.\n if (redaction.length <= 1) {\n continue;\n }\n if (!redactionSet.has(redaction)) {\n redactionSet.add(redaction);\n added += 1;\n }\n }\n if (added > 0) {\n const redactions = Array.from(redactionSet)\n .map(r => escapeRegExp(r))\n .join('|');\n redactionPattern = new RegExp(`(${redactions})`, 'g');\n }\n },\n };\n }\n\n /**\n * Creates a pretty printed winston log formatter.\n */\n static colorFormat(): Format {\n const colorizer = format.colorize();\n\n return format.combine(\n format.timestamp(),\n format.colorize({\n colors: {\n timestamp: 'dim',\n prefix: 'blue',\n field: 'cyan',\n debug: 'grey',\n },\n }),\n format.printf((info: TransformableInfo) => {\n const { timestamp, level, message, plugin, service, ...fields } = info;\n const prefix = plugin || service;\n const timestampColor = colorizer.colorize('timestamp', timestamp);\n const prefixColor = colorizer.colorize('prefix', prefix);\n\n const extraFields = Object.entries(fields)\n .map(([key, value]) => {\n let stringValue = '';\n\n try {\n stringValue = JSON.stringify(value);\n } catch (e) {\n stringValue = '[field value not castable to string]';\n }\n\n return `${colorizer.colorize('field', `${key}`)}=${stringValue}`;\n })\n .join(' ');\n\n return `${timestampColor} ${prefixColor} ${level} ${message} ${extraFields}`;\n }),\n );\n }\n\n private constructor(\n winston: Logger,\n addRedactions?: (redactions: Iterable<string>) => void,\n ) {\n this.#winston = winston;\n this.#addRedactions = addRedactions;\n }\n\n error(message: string, meta?: JsonObject): void {\n this.#winston.error(message, meta);\n }\n\n warn(message: string, meta?: JsonObject): void {\n this.#winston.warn(message, meta);\n }\n\n info(message: string, meta?: JsonObject): void {\n this.#winston.info(message, meta);\n }\n\n debug(message: string, meta?: JsonObject): void {\n this.#winston.debug(message, meta);\n 
}\n\n child(meta: JsonObject): LoggerService {\n return new WinstonLogger(this.#winston.child(meta));\n }\n\n addRedactions(redactions: Iterable<string>) {\n this.#addRedactions?.(redactions);\n }\n}\n"],"names":["format","createLogger","transports","MESSAGE","escapeRegExp"],"mappings":";;;;;;AA+CO,MAAM,aAAA,CAA2C;AAAA,EACtD,QAAA;AAAA,EACA,cAAA;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAO,OAAA,EAA8C;AAC1D,IAAA,MAAM,QAAA,GAAW,cAAc,QAAA,EAAS;AACxC,IAAA,MAAM,gBAAA,GACJ,QAAQ,GAAA,CAAI,QAAA,KAAa,eACrBA,cAAA,CAAO,IAAA,EAAK,GACZ,aAAA,CAAc,WAAA,EAAY;AAEhC,IAAA,IAAI,SAASC,oBAAA,CAAa;AAAA,MACxB,KAAA,EAAO,OAAA,CAAQ,GAAA,CAAI,SAAA,IAAa,QAAQ,KAAA,IAAS,MAAA;AAAA,MACjD,QAAQD,cAAA,CAAO,OAAA;AAAA,QACb,QAAQ,MAAA,IAAU,gBAAA;AAAA,QAClB,QAAA,CAAS;AAAA,OACX;AAAA,MACA,UAAA,EAAY,OAAA,CAAQ,UAAA,IAAc,IAAIE,mBAAW,OAAA;AAAQ,KAC1D,CAAA;AAED,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,MAAA,GAAS,MAAA,CAAO,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA;AAAA,IACpC;AAEA,IAAA,OAAO,IAAI,aAAA,CAAc,MAAA,EAAQ,QAAA,CAAS,GAAG,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,QAAA,GAGL;AACA,IAAA,MAAM,YAAA,uBAAmB,GAAA,EAAY;AAErC,IAAA,IAAI,gBAAA,GAAuC,MAAA;AAE3C,IAAA,OAAO;AAAA,MACL,MAAA,EAAQF,cAAA,CAAO,CAAC,GAAA,KAA2B;AACzC,QAAA,IAAI,CAAC,gBAAA,IAAoB,CAAC,GAAA,EAAK;AAC7B,UAAA,OAAO,GAAA;AAAA,QACT;AAEA,QAAA,GAAA,CAAIG,kBAAO,CAAA,GAAI,GAAA,CAAIA,kBAAO,CAAA,EAAG,OAAA,GAAU,kBAAkB,KAAK,CAAA;AAE9D,QAAA,OAAO,GAAA;AAAA,MACT,CAAC,CAAA,EAAE;AAAA,MACH,IAAI,aAAA,EAAe;AACjB,QAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,QAAA,KAAA,MAAW,mBAAmB,aAAA,EAAe;AAE3C,UAAA,IAAI,eAAA,KAAoB,IAAA,IAAQ,eAAA,KAAoB,MAAA,EAAW;AAC7D,YAAA;AAAA,UACF;AAIA,UAAA,MAAM,SAAA,GAAY,gBAAgB,IAAA,EAAK;AAIvC,UAAA,IAAI,SAAA,CAAU,UAAU,CAAA,EAAG;AACzB,YAAA;AAAA,UACF;AACA,UAAA,IAAI,CAAC,YAAA,CAAa,GAAA,CAAI,SAAS,CAAA,EAAG;AAChC,YAAA,YAAA,CAAa,IAAI,SAAS,CAAA;AAC1B,YAAA,KAAA,IAAS,CAAA;AAAA,UACX;AAAA,QACF;AACA,QAAA,IAAI,QAAQ,CAAA,EAAG;AACb,UAAA,MAAM,UAAA,GAAa,KAAA,CAAM,IAAA,CAAK,YAAY,CAAA,CACvC,GAAA,CAAI,CAAA,CAAA,KAAKC,yBAAA,CAAa,CAAC,CAAC,CAAA,CACxB,IAAA,CAAK,GAAG,CAAA;AACX,UAAA,gBAAA,GAAmB,IAAI,MAAA,CAAO,CAAA,CAAA,
EAAI,UAAU,KAAK,GAAG,CAAA;AAAA,QACtD;AAAA,MACF;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,WAAA,GAAsB;AAC3B,IAAA,MAAM,SAAA,GAAYJ,eAAO,QAAA,EAAS;AAElC,IAAA,OAAOA,cAAA,CAAO,OAAA;AAAA,MACZA,eAAO,SAAA,EAAU;AAAA,MACjBA,eAAO,QAAA,CAAS;AAAA,QACd,MAAA,EAAQ;AAAA,UACN,SAAA,EAAW,KAAA;AAAA,UACX,MAAA,EAAQ,MAAA;AAAA,UACR,KAAA,EAAO,MAAA;AAAA,UACP,KAAA,EAAO;AAAA;AACT,OACD,CAAA;AAAA,MACDA,cAAA,CAAO,MAAA,CAAO,CAAC,IAAA,KAA4B;AACzC,QAAA,MAAM,EAAE,WAAW,KAAA,EAAO,OAAA,EAAS,QAAQ,OAAA,EAAS,GAAG,QAAO,GAAI,IAAA;AAClE,QAAA,MAAM,SAAS,MAAA,IAAU,OAAA;AACzB,QAAA,MAAM,cAAA,GAAiB,SAAA,CAAU,QAAA,CAAS,WAAA,EAAa,SAAS,CAAA;AAChE,QAAA,MAAM,WAAA,GAAc,SAAA,CAAU,QAAA,CAAS,QAAA,EAAU,MAAM,CAAA;AAEvD,QAAA,MAAM,WAAA,GAAc,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CACtC,IAAI,CAAC,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AACrB,UAAA,IAAI,WAAA,GAAc,EAAA;AAElB,UAAA,IAAI;AACF,YAAA,WAAA,GAAc,IAAA,CAAK,UAAU,KAAK,CAAA;AAAA,UACpC,SAAS,CAAA,EAAG;AACV,YAAA,WAAA,GAAc,sCAAA;AAAA,UAChB;AAEA,UAAA,OAAO,CAAA,EAAG,UAAU,QAAA,CAAS,OAAA,EAAS,GAAG,GAAG,CAAA,CAAE,CAAC,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA;AAAA,QAChE,CAAC,CAAA,CACA,IAAA,CAAK,GAAG,CAAA;AAEX,QAAA,OAAO,CAAA,EAAG,cAAc,CAAA,CAAA,EAAI,WAAW,IAAI,KAAK,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA;AAAA,MAC5E,CAAC;AAAA,KACH;AAAA,EACF;AAAA,EAEQ,WAAA,CACN,SACA,aAAA,EACA;AACA,IAAA,IAAA,CAAK,QAAA,GAAW,OAAA;AAChB,IAAA,IAAA,CAAK,cAAA,GAAiB,aAAA;AAAA,EACxB;AAAA,EAEA,KAAA,CAAM,SAAiB,IAAA,EAAyB;AAC9C,IAAA,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,OAAA,EAAS,IAAI,CAAA;AAAA,EACnC;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAyB;AAC7C,IAAA,IAAA,CAAK,QAAA,CAAS,IAAA,CAAK,OAAA,EAAS,IAAI,CAAA;AAAA,EAClC;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAyB;AAC7C,IAAA,IAAA,CAAK,QAAA,CAAS,IAAA,CAAK,OAAA,EAAS,IAAI,CAAA;AAAA,EAClC;AAAA,EAEA,KAAA,CAAM,SAAiB,IAAA,EAAyB;AAC9C,IAAA,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,OAAA,EAAS,IAAI,CAAA;AAAA,EACnC;AAAA,EAEA,MAAM,IAAA,EAAiC;AACrC,IAAA,OAAO,IAAI,aAAA,CAAc,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,IAAI,CAAC,CAAA;AAAA,EACpD;AAAA,EAEA,cAAc,UAAA,EAA8B;AAC1C,IAAA,IAAA,CAAK,iBAAiB,UAAU,CAAA;AAAA,EAClC;AACF;;;;"}
1
+ {"version":3,"file":"WinstonLogger.cjs.js","sources":["../../../src/entrypoints/rootLogger/WinstonLogger.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n RootLoggerService,\n} from '@backstage/backend-plugin-api';\nimport { JsonObject } from '@backstage/types';\nimport { Format, TransformableInfo } from 'logform';\nimport {\n Logger,\n format,\n createLogger,\n transports,\n transport as Transport,\n config as winstonConfig,\n} from 'winston';\nimport { MESSAGE } from 'triple-beam';\nimport { escapeRegExp } from '../../lib/escapeRegExp';\nimport { winstonLevels, WinstonLoggerLevelOverride } from './types';\nimport { createLogMatcher } from './utils';\n\n/**\n * @public\n */\nexport interface WinstonLoggerOptions {\n meta?: JsonObject;\n level?: string;\n format?: Format;\n transports?: Transport[];\n}\n\n/**\n * A {@link @backstage/backend-plugin-api#LoggerService} implementation based on winston.\n *\n * @public\n */\nexport class WinstonLogger implements RootLoggerService {\n #winston: Logger;\n #addRedactions?: (redactions: Iterable<string>) => void;\n #setLevelOverrides?: (overrides: WinstonLoggerLevelOverride[]) => void;\n\n /**\n * Creates a {@link WinstonLogger} instance.\n */\n static create(options: WinstonLoggerOptions): WinstonLogger {\n const defaultLogLevel = process.env.LOG_LEVEL || options.level || 'info';\n\n const redacter = 
WinstonLogger.redacter();\n const logLevelFilter = WinstonLogger.logLevelFilter(defaultLogLevel);\n\n const defaultFormatter =\n process.env.NODE_ENV === 'production'\n ? format.json()\n : WinstonLogger.colorFormat();\n\n let logger = createLogger({\n // Lowest level possible as we let the logLevelFilter do the filtering\n level: 'silly',\n format: format.combine(\n logLevelFilter.format,\n options.format ?? defaultFormatter,\n redacter.format,\n ),\n transports: options.transports ?? new transports.Console(),\n });\n\n if (options.meta) {\n logger = logger.child(options.meta);\n }\n\n return new WinstonLogger(logger, redacter.add, logLevelFilter.setOverrides);\n }\n\n /**\n * Creates a winston log formatter for redacting secrets.\n */\n static redacter(): {\n format: Format;\n add: (redactions: Iterable<string>) => void;\n } {\n const redactionSet = new Set<string>();\n\n let redactionPattern: RegExp | undefined = undefined;\n\n return {\n format: format((obj: TransformableInfo) => {\n if (!redactionPattern || !obj) {\n return obj;\n }\n\n obj[MESSAGE] = obj[MESSAGE]?.replace?.(redactionPattern, '***');\n\n return obj;\n })(),\n add(newRedactions) {\n let added = 0;\n for (const redactionToTrim of newRedactions) {\n // Skip null or undefined values\n if (redactionToTrim === null || redactionToTrim === undefined) {\n continue;\n }\n // Trimming the string ensures that we don't accdentally get extra\n // newlines or other whitespace interfering with the redaction; this\n // can happen for example when using string literals in yaml\n const redaction = redactionToTrim.trim();\n // Exclude secrets that are empty or just one character in length. 
These\n // typically mean that you are running local dev or tests, or using the\n // --lax flag which sets things to just 'x'.\n if (redaction.length <= 1) {\n continue;\n }\n if (!redactionSet.has(redaction)) {\n redactionSet.add(redaction);\n added += 1;\n }\n }\n if (added > 0) {\n const redactions = Array.from(redactionSet)\n .map(r => escapeRegExp(r))\n .join('|');\n redactionPattern = new RegExp(`(${redactions})`, 'g');\n }\n },\n };\n }\n\n /**\n * Creates a pretty printed winston log formatter.\n */\n static colorFormat(): Format {\n const colorizer = format.colorize();\n\n return format.combine(\n format.timestamp(),\n format.colorize({\n colors: {\n timestamp: 'dim',\n prefix: 'blue',\n field: 'cyan',\n debug: 'grey',\n },\n }),\n format.printf((info: TransformableInfo) => {\n const { timestamp, level, message, plugin, service, ...fields } = info;\n const prefix = plugin || service;\n const timestampColor = colorizer.colorize('timestamp', timestamp);\n const prefixColor = colorizer.colorize('prefix', prefix);\n\n const extraFields = Object.entries(fields)\n .map(([key, value]) => {\n let stringValue = '';\n\n try {\n stringValue = JSON.stringify(value);\n } catch (e) {\n stringValue = '[field value not castable to string]';\n }\n\n return `${colorizer.colorize('field', `${key}`)}=${stringValue}`;\n })\n .join(' ');\n\n return `${timestampColor} ${prefixColor} ${level} ${message} ${extraFields}`;\n }),\n );\n }\n\n /**\n * Formatter that filters log levels using overrides, falling back to the default level when no criteria match.\n */\n static logLevelFilter(\n defaultLogLevel: keyof winstonConfig.NpmConfigSetLevels,\n ): {\n format: Format;\n setOverrides: (overrides: WinstonLoggerLevelOverride[]) => void;\n } {\n const overrides: {\n predicate: (log: TransformableInfo) => boolean;\n level: string;\n }[] = [];\n\n return {\n format: format(log => {\n for (const override of overrides) {\n if (override.predicate(log)) {\n // Discard the log if the log level 
is below the override\n // eg, if the override level is 'warn' (1) and the log is 'debug' (5)\n if (winstonLevels[log.level] > winstonLevels[override.level]) {\n return false;\n }\n\n return log;\n }\n }\n\n // Ignore logs that are below the global level\n // eg, if the global level is 'warn' (1) and the log level is 'debug' (5)\n if (winstonLevels[log.level] > winstonLevels[defaultLogLevel]) {\n return false;\n }\n\n return log;\n })(),\n setOverrides: newOverrides => {\n const newOverridesPredicates = newOverrides.map(o => ({\n predicate: createLogMatcher(o.matchers),\n level: o.level,\n }));\n // Replace the content while preserving the reference to support live config updates\n overrides.splice(0, overrides.length, ...newOverridesPredicates);\n },\n };\n }\n\n private constructor(\n winston: Logger,\n addRedactions?: (redactions: Iterable<string>) => void,\n setLevelOverrides?: (overrides: WinstonLoggerLevelOverride[]) => void,\n ) {\n this.#winston = winston;\n this.#addRedactions = addRedactions;\n this.#setLevelOverrides = setLevelOverrides;\n }\n\n error(message: string, meta?: JsonObject): void {\n this.#winston.error(message, meta);\n }\n\n warn(message: string, meta?: JsonObject): void {\n this.#winston.warn(message, meta);\n }\n\n info(message: string, meta?: JsonObject): void {\n this.#winston.info(message, meta);\n }\n\n debug(message: string, meta?: JsonObject): void {\n this.#winston.debug(message, meta);\n }\n\n child(meta: JsonObject): LoggerService {\n return new WinstonLogger(this.#winston.child(meta));\n }\n\n addRedactions(redactions: Iterable<string>) {\n this.#addRedactions?.(redactions);\n }\n\n setLevelOverrides(overrides: WinstonLoggerLevelOverride[]) {\n this.#setLevelOverrides?.(overrides);\n 
}\n}\n"],"names":["format","createLogger","transports","MESSAGE","escapeRegExp","winstonLevels","createLogMatcher"],"mappings":";;;;;;;;AAkDO,MAAM,aAAA,CAA2C;AAAA,EACtD,QAAA;AAAA,EACA,cAAA;AAAA,EACA,kBAAA;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAO,OAAA,EAA8C;AAC1D,IAAA,MAAM,eAAA,GAAkB,OAAA,CAAQ,GAAA,CAAI,SAAA,IAAa,QAAQ,KAAA,IAAS,MAAA;AAElE,IAAA,MAAM,QAAA,GAAW,cAAc,QAAA,EAAS;AACxC,IAAA,MAAM,cAAA,GAAiB,aAAA,CAAc,cAAA,CAAe,eAAe,CAAA;AAEnE,IAAA,MAAM,gBAAA,GACJ,QAAQ,GAAA,CAAI,QAAA,KAAa,eACrBA,cAAA,CAAO,IAAA,EAAK,GACZ,aAAA,CAAc,WAAA,EAAY;AAEhC,IAAA,IAAI,SAASC,oBAAA,CAAa;AAAA;AAAA,MAExB,KAAA,EAAO,OAAA;AAAA,MACP,QAAQD,cAAA,CAAO,OAAA;AAAA,QACb,cAAA,CAAe,MAAA;AAAA,QACf,QAAQ,MAAA,IAAU,gBAAA;AAAA,QAClB,QAAA,CAAS;AAAA,OACX;AAAA,MACA,UAAA,EAAY,OAAA,CAAQ,UAAA,IAAc,IAAIE,mBAAW,OAAA;AAAQ,KAC1D,CAAA;AAED,IAAA,IAAI,QAAQ,IAAA,EAAM;AAChB,MAAA,MAAA,GAAS,MAAA,CAAO,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA;AAAA,IACpC;AAEA,IAAA,OAAO,IAAI,aAAA,CAAc,MAAA,EAAQ,QAAA,CAAS,GAAA,EAAK,eAAe,YAAY,CAAA;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,QAAA,GAGL;AACA,IAAA,MAAM,YAAA,uBAAmB,GAAA,EAAY;AAErC,IAAA,IAAI,gBAAA,GAAuC,MAAA;AAE3C,IAAA,OAAO;AAAA,MACL,MAAA,EAAQF,cAAA,CAAO,CAAC,GAAA,KAA2B;AACzC,QAAA,IAAI,CAAC,gBAAA,IAAoB,CAAC,GAAA,EAAK;AAC7B,UAAA,OAAO,GAAA;AAAA,QACT;AAEA,QAAA,GAAA,CAAIG,kBAAO,CAAA,GAAI,GAAA,CAAIA,kBAAO,CAAA,EAAG,OAAA,GAAU,kBAAkB,KAAK,CAAA;AAE9D,QAAA,OAAO,GAAA;AAAA,MACT,CAAC,CAAA,EAAE;AAAA,MACH,IAAI,aAAA,EAAe;AACjB,QAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,QAAA,KAAA,MAAW,mBAAmB,aAAA,EAAe;AAE3C,UAAA,IAAI,eAAA,KAAoB,IAAA,IAAQ,eAAA,KAAoB,MAAA,EAAW;AAC7D,YAAA;AAAA,UACF;AAIA,UAAA,MAAM,SAAA,GAAY,gBAAgB,IAAA,EAAK;AAIvC,UAAA,IAAI,SAAA,CAAU,UAAU,CAAA,EAAG;AACzB,YAAA;AAAA,UACF;AACA,UAAA,IAAI,CAAC,YAAA,CAAa,GAAA,CAAI,SAAS,CAAA,EAAG;AAChC,YAAA,YAAA,CAAa,IAAI,SAAS,CAAA;AAC1B,YAAA,KAAA,IAAS,CAAA;AAAA,UACX;AAAA,QACF;AACA,QAAA,IAAI,QAAQ,CAAA,EAAG;AACb,UAAA,MAAM,UAAA,GAAa,KAAA,CAAM,IAAA,CAAK,YAAY,CAAA,CACvC,GAAA,CAAI,CAAA,CAAA,KAAKC,yBAAA,CAAa,CAAC,CAAC,CAAA,CACxB,IAAA,CAAK,GAAG,CAAA;AACX,UAAA,gBAAA,GAAmB,IAAI,MAAA,CAAO,CAAA,CAAA,EAAI,UAAU
,KAAK,GAAG,CAAA;AAAA,QACtD;AAAA,MACF;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,WAAA,GAAsB;AAC3B,IAAA,MAAM,SAAA,GAAYJ,eAAO,QAAA,EAAS;AAElC,IAAA,OAAOA,cAAA,CAAO,OAAA;AAAA,MACZA,eAAO,SAAA,EAAU;AAAA,MACjBA,eAAO,QAAA,CAAS;AAAA,QACd,MAAA,EAAQ;AAAA,UACN,SAAA,EAAW,KAAA;AAAA,UACX,MAAA,EAAQ,MAAA;AAAA,UACR,KAAA,EAAO,MAAA;AAAA,UACP,KAAA,EAAO;AAAA;AACT,OACD,CAAA;AAAA,MACDA,cAAA,CAAO,MAAA,CAAO,CAAC,IAAA,KAA4B;AACzC,QAAA,MAAM,EAAE,WAAW,KAAA,EAAO,OAAA,EAAS,QAAQ,OAAA,EAAS,GAAG,QAAO,GAAI,IAAA;AAClE,QAAA,MAAM,SAAS,MAAA,IAAU,OAAA;AACzB,QAAA,MAAM,cAAA,GAAiB,SAAA,CAAU,QAAA,CAAS,WAAA,EAAa,SAAS,CAAA;AAChE,QAAA,MAAM,WAAA,GAAc,SAAA,CAAU,QAAA,CAAS,QAAA,EAAU,MAAM,CAAA;AAEvD,QAAA,MAAM,WAAA,GAAc,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,CACtC,IAAI,CAAC,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AACrB,UAAA,IAAI,WAAA,GAAc,EAAA;AAElB,UAAA,IAAI;AACF,YAAA,WAAA,GAAc,IAAA,CAAK,UAAU,KAAK,CAAA;AAAA,UACpC,SAAS,CAAA,EAAG;AACV,YAAA,WAAA,GAAc,sCAAA;AAAA,UAChB;AAEA,UAAA,OAAO,CAAA,EAAG,UAAU,QAAA,CAAS,OAAA,EAAS,GAAG,GAAG,CAAA,CAAE,CAAC,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA;AAAA,QAChE,CAAC,CAAA,CACA,IAAA,CAAK,GAAG,CAAA;AAEX,QAAA,OAAO,CAAA,EAAG,cAAc,CAAA,CAAA,EAAI,WAAW,IAAI,KAAK,CAAA,CAAA,EAAI,OAAO,CAAA,CAAA,EAAI,WAAW,CAAA,CAAA;AAAA,MAC5E,CAAC;AAAA,KACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,eACL,eAAA,EAIA;AACA,IAAA,MAAM,YAGA,EAAC;AAEP,IAAA,OAAO;AAAA,MACL,MAAA,EAAQA,eAAO,CAAA,GAAA,KAAO;AACpB,QAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,UAAA,IAAI,QAAA,CAAS,SAAA,CAAU,GAAG,CAAA,EAAG;AAG3B,YAAA,IAAIK,oBAAc,GAAA,CAAI,KAAK,IAAIA,mBAAA,CAAc,QAAA,CAAS,KAAK,CAAA,EAAG;AAC5D,cAAA,OAAO,KAAA;AAAA,YACT;AAEA,YAAA,OAAO,GAAA;AAAA,UACT;AAAA,QACF;AAIA,QAAA,IAAIA,oBAAc,GAAA,CAAI,KAAK,CAAA,GAAIA,mBAAA,CAAc,eAAe,CAAA,EAAG;AAC7D,UAAA,OAAO,KAAA;AAAA,QACT;AAEA,QAAA,OAAO,GAAA;AAAA,MACT,CAAC,CAAA,EAAE;AAAA,MACH,cAAc,CAAA,YAAA,KAAgB;AAC5B,QAAA,MAAM,sBAAA,GAAyB,YAAA,CAAa,GAAA,CAAI,CAAA,CAAA,MAAM;AAAA,UACpD,SAAA,EAAWC,sBAAA,CAAiB,CAAA,CAAE,QAAQ,CAAA;AAAA,UACtC,OAAO,CAAA,CAAE;AAAA,SACX,CAAE,CAAA;AAEF,QAAA,SAAA,CAAU,MAAA,CAAO,CAAA,EAAG,SAAA,CAAU,MAAA,EAAQ,GAAG,sBAAsB,CAAA;AAAA
,MACjE;AAAA,KACF;AAAA,EACF;AAAA,EAEQ,WAAA,CACN,OAAA,EACA,aAAA,EACA,iBAAA,EACA;AACA,IAAA,IAAA,CAAK,QAAA,GAAW,OAAA;AAChB,IAAA,IAAA,CAAK,cAAA,GAAiB,aAAA;AACtB,IAAA,IAAA,CAAK,kBAAA,GAAqB,iBAAA;AAAA,EAC5B;AAAA,EAEA,KAAA,CAAM,SAAiB,IAAA,EAAyB;AAC9C,IAAA,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,OAAA,EAAS,IAAI,CAAA;AAAA,EACnC;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAyB;AAC7C,IAAA,IAAA,CAAK,QAAA,CAAS,IAAA,CAAK,OAAA,EAAS,IAAI,CAAA;AAAA,EAClC;AAAA,EAEA,IAAA,CAAK,SAAiB,IAAA,EAAyB;AAC7C,IAAA,IAAA,CAAK,QAAA,CAAS,IAAA,CAAK,OAAA,EAAS,IAAI,CAAA;AAAA,EAClC;AAAA,EAEA,KAAA,CAAM,SAAiB,IAAA,EAAyB;AAC9C,IAAA,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,OAAA,EAAS,IAAI,CAAA;AAAA,EACnC;AAAA,EAEA,MAAM,IAAA,EAAiC;AACrC,IAAA,OAAO,IAAI,aAAA,CAAc,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,IAAI,CAAC,CAAA;AAAA,EACpD;AAAA,EAEA,cAAc,UAAA,EAA8B;AAC1C,IAAA,IAAA,CAAK,iBAAiB,UAAU,CAAA;AAAA,EAClC;AAAA,EAEA,kBAAkB,SAAA,EAAyC;AACzD,IAAA,IAAA,CAAK,qBAAqB,SAAS,CAAA;AAAA,EACrC;AACF;;;;"}
@@ -0,0 +1,32 @@
1
+ 'use strict';
2
+
3
+ var types = require('./types.cjs.js');
4
+
5
+ const getRootLoggerConfig = (config) => {
6
+ const level = config.getOptionalString("backend.logger.level");
7
+ const meta = config.getOptionalConfig("backend.logger.meta")?.get();
8
+ const overridesConfig = config.getOptionalConfigArray(
9
+ "backend.logger.overrides"
10
+ );
11
+ const overrides = overridesConfig?.map((override, i) => {
12
+ const overrideLevel = override.getString("level");
13
+ if (types.winstonLevels[overrideLevel] === void 0) {
14
+ throw new Error(
15
+ `Invalid config at backend.logger.overrides[${i}].level, '${overrideLevel}' is not a valid logging level, must be one of 'error', 'warn', 'info' or 'debug'.`
16
+ );
17
+ }
18
+ const matchers = override.getConfig("matchers").get();
19
+ return {
20
+ matchers,
21
+ level: overrideLevel
22
+ };
23
+ });
24
+ return {
25
+ meta,
26
+ level,
27
+ overrides
28
+ };
29
+ };
30
+
31
+ exports.getRootLoggerConfig = getRootLoggerConfig;
32
+ //# sourceMappingURL=config.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.cjs.js","sources":["../../../src/entrypoints/rootLogger/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { RootConfigService } from '@backstage/backend-plugin-api';\nimport { JsonObject } from '@backstage/types';\nimport {\n RootLoggerConfig,\n winstonLevels,\n WinstonLoggerLevelOverrideMatchers,\n} from './types';\n\nexport const getRootLoggerConfig = (\n config: RootConfigService,\n): RootLoggerConfig => {\n const level = config.getOptionalString('backend.logger.level');\n const meta = config\n .getOptionalConfig('backend.logger.meta')\n ?.get<JsonObject>();\n\n const overridesConfig = config.getOptionalConfigArray(\n 'backend.logger.overrides',\n );\n const overrides = overridesConfig?.map((override, i) => {\n const overrideLevel = override.getString('level');\n if (winstonLevels[overrideLevel] === undefined) {\n throw new Error(\n `Invalid config at backend.logger.overrides[${i}].level, '${overrideLevel}' is not a valid logging level, must be one of 'error', 'warn', 'info' or 'debug'.`,\n );\n }\n\n const matchers = override\n .getConfig('matchers')\n .get<WinstonLoggerLevelOverrideMatchers>();\n\n return {\n matchers,\n level: overrideLevel,\n };\n });\n\n return {\n meta,\n level,\n overrides,\n 
};\n};\n"],"names":["winstonLevels"],"mappings":";;;;AAuBO,MAAM,mBAAA,GAAsB,CACjC,MAAA,KACqB;AACrB,EAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,iBAAA,CAAkB,sBAAsB,CAAA;AAC7D,EAAA,MAAM,IAAA,GAAO,MAAA,CACV,iBAAA,CAAkB,qBAAqB,GACtC,GAAA,EAAgB;AAEpB,EAAA,MAAM,kBAAkB,MAAA,CAAO,sBAAA;AAAA,IAC7B;AAAA,GACF;AACA,EAAA,MAAM,SAAA,GAAY,eAAA,EAAiB,GAAA,CAAI,CAAC,UAAU,CAAA,KAAM;AACtD,IAAA,MAAM,aAAA,GAAgB,QAAA,CAAS,SAAA,CAAU,OAAO,CAAA;AAChD,IAAA,IAAIA,mBAAA,CAAc,aAAa,CAAA,KAAM,MAAA,EAAW;AAC9C,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,CAAC,CAAA,UAAA,EAAa,aAAa,CAAA,kFAAA;AAAA,OAC3E;AAAA,IACF;AAEA,IAAA,MAAM,QAAA,GAAW,QAAA,CACd,SAAA,CAAU,UAAU,EACpB,GAAA,EAAwC;AAE3C,IAAA,OAAO;AAAA,MACL,QAAA;AAAA,MACA,KAAA,EAAO;AAAA,KACT;AAAA,EACF,CAAC,CAAA;AAED,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,KAAA;AAAA,IACA;AAAA,GACF;AACF;;;;"}
@@ -2,26 +2,33 @@
2
2
 
3
3
  var backendPluginApi = require('@backstage/backend-plugin-api');
4
4
  var winston = require('winston');
5
- var WinstonLogger = require('./WinstonLogger.cjs.js');
6
5
  var createConfigSecretEnumerator = require('../rootConfig/createConfigSecretEnumerator.cjs.js');
6
+ var WinstonLogger = require('./WinstonLogger.cjs.js');
7
+ var config = require('./config.cjs.js');
7
8
 
8
9
  const rootLoggerServiceFactory = backendPluginApi.createServiceFactory({
9
10
  service: backendPluginApi.coreServices.rootLogger,
10
11
  deps: {
11
12
  config: backendPluginApi.coreServices.rootConfig
12
13
  },
13
- async factory({ config }) {
14
+ async factory({ config: config$1 }) {
15
+ const rootLoggerConfig = config.getRootLoggerConfig(config$1);
14
16
  const logger = WinstonLogger.WinstonLogger.create({
15
17
  meta: {
16
- service: "backstage"
18
+ service: "backstage",
19
+ ...rootLoggerConfig.meta
17
20
  },
18
- level: process.env.LOG_LEVEL || "info",
21
+ level: process.env.LOG_LEVEL || rootLoggerConfig.level || "info",
19
22
  format: process.env.NODE_ENV === "production" ? winston.format.json() : WinstonLogger.WinstonLogger.colorFormat(),
20
23
  transports: [new winston.transports.Console()]
21
24
  });
22
25
  const secretEnumerator = await createConfigSecretEnumerator.createConfigSecretEnumerator({ logger });
23
- logger.addRedactions(secretEnumerator(config));
24
- config.subscribe?.(() => logger.addRedactions(secretEnumerator(config)));
26
+ logger.addRedactions(secretEnumerator(config$1));
27
+ config$1.subscribe?.(() => logger.addRedactions(secretEnumerator(config$1)));
28
+ logger.setLevelOverrides(rootLoggerConfig.overrides ?? []);
29
+ config$1.subscribe?.(
30
+ () => logger.setLevelOverrides(config.getRootLoggerConfig(config$1).overrides ?? [])
31
+ );
25
32
  return logger;
26
33
  }
27
34
  });
@@ -1 +1 @@
1
- {"version":3,"file":"rootLoggerServiceFactory.cjs.js","sources":["../../../src/entrypoints/rootLogger/rootLoggerServiceFactory.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n createServiceFactory,\n coreServices,\n} from '@backstage/backend-plugin-api';\nimport { transports, format } from 'winston';\nimport { WinstonLogger } from '../rootLogger/WinstonLogger';\nimport { createConfigSecretEnumerator } from '../rootConfig/createConfigSecretEnumerator';\n\n/**\n * Root-level logging.\n *\n * See {@link @backstage/backend-plugin-api#RootLoggerService}\n * and {@link https://backstage.io/docs/backend-system/core-services/root-logger | the service docs}\n * for more information.\n *\n * @public\n */\nexport const rootLoggerServiceFactory = createServiceFactory({\n service: coreServices.rootLogger,\n deps: {\n config: coreServices.rootConfig,\n },\n async factory({ config }) {\n const logger = WinstonLogger.create({\n meta: {\n service: 'backstage',\n },\n level: process.env.LOG_LEVEL || 'info',\n format:\n process.env.NODE_ENV === 'production'\n ? 
format.json()\n : WinstonLogger.colorFormat(),\n transports: [new transports.Console()],\n });\n\n const secretEnumerator = await createConfigSecretEnumerator({ logger });\n logger.addRedactions(secretEnumerator(config));\n config.subscribe?.(() => logger.addRedactions(secretEnumerator(config)));\n\n return logger;\n },\n});\n"],"names":["createServiceFactory","coreServices","WinstonLogger","format","transports","createConfigSecretEnumerator"],"mappings":";;;;;;;AAiCO,MAAM,2BAA2BA,qCAAA,CAAqB;AAAA,EAC3D,SAASC,6BAAA,CAAa,UAAA;AAAA,EACtB,IAAA,EAAM;AAAA,IACJ,QAAQA,6BAAA,CAAa;AAAA,GACvB;AAAA,EACA,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,IAAA,MAAM,MAAA,GAASC,4BAAc,MAAA,CAAO;AAAA,MAClC,IAAA,EAAM;AAAA,QACJ,OAAA,EAAS;AAAA,OACX;AAAA,MACA,KAAA,EAAO,OAAA,CAAQ,GAAA,CAAI,SAAA,IAAa,MAAA;AAAA,MAChC,MAAA,EACE,QAAQ,GAAA,CAAI,QAAA,KAAa,eACrBC,cAAA,CAAO,IAAA,EAAK,GACZD,2BAAA,CAAc,WAAA,EAAY;AAAA,MAChC,UAAA,EAAY,CAAC,IAAIE,kBAAA,CAAW,SAAS;AAAA,KACtC,CAAA;AAED,IAAA,MAAM,gBAAA,GAAmB,MAAMC,yDAAA,CAA6B,EAAE,QAAQ,CAAA;AACtE,IAAA,MAAA,CAAO,aAAA,CAAc,gBAAA,CAAiB,MAAM,CAAC,CAAA;AAC7C,IAAA,MAAA,CAAO,YAAY,MAAM,MAAA,CAAO,cAAc,gBAAA,CAAiB,MAAM,CAAC,CAAC,CAAA;AAEvE,IAAA,OAAO,MAAA;AAAA,EACT;AACF,CAAC;;;;"}
1
+ {"version":3,"file":"rootLoggerServiceFactory.cjs.js","sources":["../../../src/entrypoints/rootLogger/rootLoggerServiceFactory.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n coreServices,\n createServiceFactory,\n} from '@backstage/backend-plugin-api';\nimport { format, transports } from 'winston';\nimport { createConfigSecretEnumerator } from '../rootConfig/createConfigSecretEnumerator';\nimport { WinstonLogger } from '../rootLogger/WinstonLogger';\nimport { getRootLoggerConfig } from './config';\n\n/**\n * Root-level logging.\n *\n * See {@link @backstage/backend-plugin-api#RootLoggerService}\n * and {@link https://backstage.io/docs/backend-system/core-services/root-logger | the service docs}\n * for more information.\n *\n * @public\n */\nexport const rootLoggerServiceFactory = createServiceFactory({\n service: coreServices.rootLogger,\n deps: {\n config: coreServices.rootConfig,\n },\n async factory({ config }) {\n const rootLoggerConfig = getRootLoggerConfig(config);\n\n const logger = WinstonLogger.create({\n meta: {\n service: 'backstage',\n ...rootLoggerConfig.meta,\n },\n level: process.env.LOG_LEVEL || rootLoggerConfig.level || 'info',\n format:\n process.env.NODE_ENV === 'production'\n ? 
format.json()\n : WinstonLogger.colorFormat(),\n transports: [new transports.Console()],\n });\n\n const secretEnumerator = await createConfigSecretEnumerator({ logger });\n logger.addRedactions(secretEnumerator(config));\n config.subscribe?.(() => logger.addRedactions(secretEnumerator(config)));\n\n logger.setLevelOverrides(rootLoggerConfig.overrides ?? []);\n config.subscribe?.(() =>\n logger.setLevelOverrides(getRootLoggerConfig(config).overrides ?? []),\n );\n\n return logger;\n },\n});\n"],"names":["createServiceFactory","coreServices","config","getRootLoggerConfig","WinstonLogger","format","transports","createConfigSecretEnumerator"],"mappings":";;;;;;;;AAkCO,MAAM,2BAA2BA,qCAAA,CAAqB;AAAA,EAC3D,SAASC,6BAAA,CAAa,UAAA;AAAA,EACtB,IAAA,EAAM;AAAA,IACJ,QAAQA,6BAAA,CAAa;AAAA,GACvB;AAAA,EACA,MAAM,OAAA,CAAQ,UAAEC,QAAA,EAAO,EAAG;AACxB,IAAA,MAAM,gBAAA,GAAmBC,2BAAoBD,QAAM,CAAA;AAEnD,IAAA,MAAM,MAAA,GAASE,4BAAc,MAAA,CAAO;AAAA,MAClC,IAAA,EAAM;AAAA,QACJ,OAAA,EAAS,WAAA;AAAA,QACT,GAAG,gBAAA,CAAiB;AAAA,OACtB;AAAA,MACA,KAAA,EAAO,OAAA,CAAQ,GAAA,CAAI,SAAA,IAAa,iBAAiB,KAAA,IAAS,MAAA;AAAA,MAC1D,MAAA,EACE,QAAQ,GAAA,CAAI,QAAA,KAAa,eACrBC,cAAA,CAAO,IAAA,EAAK,GACZD,2BAAA,CAAc,WAAA,EAAY;AAAA,MAChC,UAAA,EAAY,CAAC,IAAIE,kBAAA,CAAW,SAAS;AAAA,KACtC,CAAA;AAED,IAAA,MAAM,gBAAA,GAAmB,MAAMC,yDAAA,CAA6B,EAAE,QAAQ,CAAA;AACtE,IAAA,MAAA,CAAO,aAAA,CAAc,gBAAA,CAAiBL,QAAM,CAAC,CAAA;AAC7C,IAAAA,QAAA,CAAO,YAAY,MAAM,MAAA,CAAO,cAAc,gBAAA,CAAiBA,QAAM,CAAC,CAAC,CAAA;AAEvE,IAAA,MAAA,CAAO,iBAAA,CAAkB,gBAAA,CAAiB,SAAA,IAAa,EAAE,CAAA;AACzD,IAAAA,QAAA,CAAO,SAAA;AAAA,MAAY,MACjB,OAAO,iBAAA,CAAkBC,0BAAA,CAAoBD,QAAM,CAAA,CAAE,SAAA,IAAa,EAAE;AAAA,KACtE;AAEA,IAAA,OAAO,MAAA;AAAA,EACT;AACF,CAAC;;;;"}
@@ -0,0 +1,8 @@
1
+ 'use strict';
2
+
3
+ var winston = require('winston');
4
+
5
+ const winstonLevels = winston.config.npm.levels;
6
+
7
+ exports.winstonLevels = winstonLevels;
8
+ //# sourceMappingURL=types.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.cjs.js","sources":["../../../src/entrypoints/rootLogger/types.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { JsonObject, JsonPrimitive } from '@backstage/types';\nimport { config as winstonConfig } from 'winston';\n\n/**\n * @public\n */\nexport type WinstonLoggerLevelOverrideMatchers = {\n [key: string]: JsonPrimitive | JsonPrimitive[] | undefined;\n};\n\n/**\n * @public\n */\nexport type WinstonLoggerLevelOverride = {\n matchers: WinstonLoggerLevelOverrideMatchers;\n level: string;\n};\n\nexport type RootLoggerConfig = {\n level?: string;\n meta?: JsonObject;\n overrides?: WinstonLoggerLevelOverride[];\n};\n\nexport const winstonLevels = winstonConfig.npm.levels;\n"],"names":["winstonConfig"],"mappings":";;;;AAuCO,MAAM,aAAA,GAAgBA,eAAc,GAAA,CAAI;;;;"}
@@ -0,0 +1,38 @@
1
+ 'use strict';
2
+
3
+ const parseRegex = (s) => {
4
+ if (!s.startsWith("/")) return null;
5
+ const lastSlash = s.lastIndexOf("/");
6
+ if (lastSlash <= 0) return null;
7
+ const pattern = s.slice(1, lastSlash);
8
+ const flags = s.slice(lastSlash + 1);
9
+ try {
10
+ return new RegExp(pattern, flags);
11
+ } catch {
12
+ return null;
13
+ }
14
+ };
15
+ const createLogFieldMatcher = (matcher) => {
16
+ if (Array.isArray(matcher)) {
17
+ const fns = matcher.map((m) => createLogFieldMatcher(m));
18
+ return (logField) => fns.some((fn) => fn(logField));
19
+ }
20
+ if (typeof matcher !== "string") {
21
+ return (logField) => logField === matcher;
22
+ }
23
+ const regex = parseRegex(matcher);
24
+ if (regex) {
25
+ return (logField) => typeof logField === "string" && regex.test(logField);
26
+ }
27
+ return (logField) => logField === matcher;
28
+ };
29
+ const createLogMatcher = (matchers) => {
30
+ const logFieldMatchers = Object.entries(matchers).map(([key, m]) => {
31
+ const fn = createLogFieldMatcher(m);
32
+ return [key, fn];
33
+ });
34
+ return (log) => logFieldMatchers.every(([key, fn]) => fn(log[key]));
35
+ };
36
+
37
+ exports.createLogMatcher = createLogMatcher;
38
+ //# sourceMappingURL=utils.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.cjs.js","sources":["../../../src/entrypoints/rootLogger/utils.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TransformableInfo } from 'logform';\nimport { WinstonLoggerLevelOverrideMatchers } from './types';\n\n/** Parse a slash-delimited regex like `/pattern/flags` into a RegExp, or null if not a regex-string */\nconst parseRegex = (s: string): RegExp | null => {\n if (!s.startsWith('/')) return null;\n const lastSlash = s.lastIndexOf('/');\n if (lastSlash <= 0) return null;\n\n const pattern = s.slice(1, lastSlash);\n const flags = s.slice(lastSlash + 1);\n\n try {\n return new RegExp(pattern, flags);\n } catch {\n return null; // fall back to treating it as a plain string\n }\n};\n\n/**\n * Create a predicate function that determines whether a log field matches a given matcher.\n *\n * The matcher can be:\n * - A string (exact match or regex pattern delimited by slashes, e.g. 
`/pattern/`)\n * - A non-string value (compared by strict equality)\n * - An array of matchers (returns true if any matcher matches)\n *\n * @param matcher - The matcher or array of matchers to compare against the log field.\n * @returns A function that takes a log field and returns `true` if it matches the matcher, otherwise `false`.\n */\nconst createLogFieldMatcher = (\n matcher: WinstonLoggerLevelOverrideMatchers[0],\n): ((logField: unknown) => boolean) => {\n // Array of matchers: create predicates for each element and OR them together\n if (Array.isArray(matcher)) {\n const fns = matcher.map(m => createLogFieldMatcher(m));\n return (logField: unknown) => fns.some(fn => fn(logField));\n }\n\n // Non-string matcher: strict equality\n if (typeof matcher !== 'string') {\n return (logField: unknown) => logField === matcher;\n }\n\n // String matcher: maybe a slash-delimited regex (/pattern/flags)\n const regex = parseRegex(matcher);\n if (regex) {\n return (logField: unknown) =>\n typeof logField === 'string' && regex.test(logField);\n }\n\n // Plain string matcher: strict equality\n return (logField: unknown) => logField === matcher;\n};\n\n/**\n * Create a predicate function that determines whether a log entry matches\n * all specified override matchers.\n *\n * Iterates over each key-matcher pair in the provided `matchers` object,\n * retrieves the corresponding field from the `log` object, and checks if\n * the field matches the matcher using `isLogFieldMatching`. 
Returns `true`\n * only if all matchers are satisfied.\n *\n * @param matchers - An object where each key corresponds to a log field and each value is a matcher to test against that field.\n * @returns A function that takes a log entry and returns `true` if it matches all specified matchers, otherwise `false`.\n */\nexport const createLogMatcher = (\n matchers: WinstonLoggerLevelOverrideMatchers,\n): ((log: TransformableInfo) => boolean) => {\n const logFieldMatchers = Object.entries(matchers).map(([key, m]) => {\n const fn = createLogFieldMatcher(m);\n return [key, fn] as const;\n });\n\n return (log: TransformableInfo) =>\n logFieldMatchers.every(([key, fn]) => fn(log[key]));\n};\n"],"names":[],"mappings":";;AAoBA,MAAM,UAAA,GAAa,CAAC,CAAA,KAA6B;AAC/C,EAAA,IAAI,CAAC,CAAA,CAAE,UAAA,CAAW,GAAG,GAAG,OAAO,IAAA;AAC/B,EAAA,MAAM,SAAA,GAAY,CAAA,CAAE,WAAA,CAAY,GAAG,CAAA;AACnC,EAAA,IAAI,SAAA,IAAa,GAAG,OAAO,IAAA;AAE3B,EAAA,MAAM,OAAA,GAAU,CAAA,CAAE,KAAA,CAAM,CAAA,EAAG,SAAS,CAAA;AACpC,EAAA,MAAM,KAAA,GAAQ,CAAA,CAAE,KAAA,CAAM,SAAA,GAAY,CAAC,CAAA;AAEnC,EAAA,IAAI;AACF,IAAA,OAAO,IAAI,MAAA,CAAO,OAAA,EAAS,KAAK,CAAA;AAAA,EAClC,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,IAAA;AAAA,EACT;AACF,CAAA;AAaA,MAAM,qBAAA,GAAwB,CAC5B,OAAA,KACqC;AAErC,EAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,OAAO,CAAA,EAAG;AAC1B,IAAA,MAAM,MAAM,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK,qBAAA,CAAsB,CAAC,CAAC,CAAA;AACrD,IAAA,OAAO,CAAC,QAAA,KAAsB,GAAA,CAAI,KAAK,CAAA,EAAA,KAAM,EAAA,CAAG,QAAQ,CAAC,CAAA;AAAA,EAC3D;AAGA,EAAA,IAAI,OAAO,YAAY,QAAA,EAAU;AAC/B,IAAA,OAAO,CAAC,aAAsB,QAAA,KAAa,OAAA;AAAA,EAC7C;AAGA,EAAA,MAAM,KAAA,GAAQ,WAAW,OAAO,CAAA;AAChC,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,OAAO,CAAC,QAAA,KACN,OAAO,aAAa,QAAA,IAAY,KAAA,CAAM,KAAK,QAAQ,CAAA;AAAA,EACvD;AAGA,EAAA,OAAO,CAAC,aAAsB,QAAA,KAAa,OAAA;AAC7C,CAAA;AAcO,MAAM,gBAAA,GAAmB,CAC9B,QAAA,KAC0C;AAC1C,EAAA,MAAM,gBAAA,GAAmB,MAAA,CAAO,OAAA,CAAQ,QAAQ,CAAA,CAAE,IAAI,CAAC,CAAC,GAAA,EAAK,CAAC,CAAA,KAAM;AAClE,IAAA,MAAM,EAAA,GAAK,sBAAsB,CAAC,CAAA;AAClC,IAAA,OAAO,CAAC,KAAK,EAAE,CAAA;AAAA,EACjB,CAAC,CAAA;AAED,EAAA,OAAO,C
AAC,GAAA,KACN,gBAAA,CAAiB,KAAA,CAAM,CAAC,CAAC,GAAA,EAAK,EAAE,CAAA,KAAM,EAAA,CAAG,GAAA,CAAI,GAAG,CAAC,CAAC,CAAA;AACtD;;;;"}
@@ -8,6 +8,8 @@ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'defau
8
8
 
9
9
  var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
10
10
 
11
+ const REDIRECT_STATUS_CODES = [301, 302, 307, 308];
12
+ const MAX_REDIRECTS = 5;
11
13
  const isInRange = (num, [start, end]) => {
12
14
  return num >= start && num <= end;
13
15
  };
@@ -36,6 +38,26 @@ const parsePortPredicate = (port) => {
36
38
  }
37
39
  return (url) => !url.port;
38
40
  };
41
+ function predicateFromConfig(config) {
42
+ const allow = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
43
+ const paths = allowConfig.getOptionalStringArray("paths");
44
+ const checkPath = paths ? (url) => {
45
+ const targetPath = platformPath__default.default.posix.normalize(url.pathname);
46
+ return paths.some(
47
+ (allowedPath) => targetPath.startsWith(allowedPath)
48
+ );
49
+ } : (_url) => true;
50
+ const host = allowConfig.getString("host");
51
+ const [hostname, port] = host.split(":");
52
+ const checkPort = parsePortPredicate(port);
53
+ if (hostname.startsWith("*.")) {
54
+ const suffix = hostname.slice(1);
55
+ return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
56
+ }
57
+ return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
58
+ });
59
+ return allow?.length ? (url) => allow.some((p) => p(url)) : () => false;
60
+ }
39
61
  class FetchUrlReader {
40
62
  /**
41
63
  * The factory creates a single reader that will be used for reading any URL that's listed
@@ -50,64 +72,72 @@ class FetchUrlReader {
50
72
  * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
51
73
  */
52
74
  static factory = ({ config }) => {
53
- const predicates = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
54
- const paths = allowConfig.getOptionalStringArray("paths");
55
- const checkPath = paths ? (url) => {
56
- const targetPath = platformPath__default.default.posix.normalize(url.pathname);
57
- return paths.some(
58
- (allowedPath) => targetPath.startsWith(allowedPath)
59
- );
60
- } : (_url) => true;
61
- const host = allowConfig.getString("host");
62
- const [hostname, port] = host.split(":");
63
- const checkPort = parsePortPredicate(port);
64
- if (hostname.startsWith("*.")) {
65
- const suffix = hostname.slice(1);
66
- return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
67
- }
68
- return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
69
- }) ?? [];
70
- const reader = new FetchUrlReader();
71
- const predicate = (url) => predicates.some((p) => p(url));
75
+ const predicate = predicateFromConfig(config);
76
+ const reader = new FetchUrlReader({ predicate });
72
77
  return [{ reader, predicate }];
73
78
  };
79
+ static fromConfig(config) {
80
+ return new FetchUrlReader({ predicate: predicateFromConfig(config) });
81
+ }
82
+ #predicate;
83
+ constructor(options) {
84
+ this.#predicate = options.predicate;
85
+ }
74
86
  async read(url) {
75
87
  const response = await this.readUrl(url);
76
88
  return response.buffer();
77
89
  }
78
90
  async readUrl(url, options) {
79
- let response;
80
- try {
81
- response = await fetch(url, {
82
- headers: {
83
- ...options?.etag && { "If-None-Match": options.etag },
84
- ...options?.lastModifiedAfter && {
85
- "If-Modified-Since": options.lastModifiedAfter.toUTCString()
91
+ let currentUrl = url;
92
+ for (let redirectCount = 0; redirectCount < MAX_REDIRECTS; redirectCount += 1) {
93
+ const parsedUrl = new URL(currentUrl);
94
+ if (!this.#predicate(parsedUrl)) {
95
+ throw new Error(
96
+ `URL not allowed by backend.reading.allow configuration: ${currentUrl}`
97
+ );
98
+ }
99
+ let response;
100
+ try {
101
+ response = await fetch(currentUrl, {
102
+ headers: {
103
+ ...options?.etag && { "If-None-Match": options.etag },
104
+ ...options?.lastModifiedAfter && {
105
+ "If-Modified-Since": options.lastModifiedAfter.toUTCString()
106
+ },
107
+ ...options?.token && { Authorization: `Bearer ${options.token}` }
86
108
  },
87
- ...options?.token && { Authorization: `Bearer ${options.token}` }
88
- },
89
- // TODO(freben): The signal cast is there because pre-3.x versions of
90
- // node-fetch have a very slightly deviating AbortSignal type signature.
91
- // The difference does not affect us in practice however. The cast can
92
- // be removed after we support ESM for CLI dependencies and migrate to
93
- // version 3 of node-fetch.
94
- // https://github.com/backstage/backstage/issues/8242
95
- signal: options?.signal
96
- });
97
- } catch (e) {
98
- throw new Error(`Unable to read ${url}, ${e}`);
99
- }
100
- if (response.status === 304) {
101
- throw new errors.NotModifiedError();
102
- }
103
- if (response.ok) {
104
- return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
105
- }
106
- const message = `could not read ${url}, ${response.status} ${response.statusText}`;
107
- if (response.status === 404) {
108
- throw new errors.NotFoundError(message);
109
+ // Handle redirects manually to validate targets against the allowlist
110
+ redirect: "manual",
111
+ // TODO(freben): The signal cast is there because pre-3.x versions of
112
+ // node-fetch have a very slightly deviating AbortSignal type signature.
113
+ // The difference does not affect us in practice however. The cast can
114
+ // be removed after we support ESM for CLI dependencies and migrate to
115
+ // version 3 of node-fetch.
116
+ // https://github.com/backstage/backstage/issues/8242
117
+ signal: options?.signal
118
+ });
119
+ } catch (e) {
120
+ throw new Error(`Unable to read ${currentUrl}, ${e}`);
121
+ }
122
+ if (response.ok) {
123
+ return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
124
+ }
125
+ if (response.status === 304) {
126
+ throw new errors.NotModifiedError();
127
+ }
128
+ const location = response.headers.get("location");
129
+ if (!REDIRECT_STATUS_CODES.includes(response.status) || !location) {
130
+ const message = `could not read ${currentUrl}, ${response.status} ${response.statusText}`;
131
+ if (response.status === 404) {
132
+ throw new errors.NotFoundError(message);
133
+ }
134
+ throw new Error(message);
135
+ }
136
+ currentUrl = new URL(location, currentUrl).toString();
109
137
  }
110
- throw new Error(message);
138
+ throw new Error(
139
+ `Too many redirects (max ${MAX_REDIRECTS}) when reading ${url}`
140
+ );
111
141
  }
112
142
  async readTree() {
113
143
  throw new Error("FetchUrlReader does not implement readTree");
@@ -1 +1 @@
1
- {"version":3,"file":"FetchUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/FetchUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n assertError,\n NotFoundError,\n NotModifiedError,\n} from '@backstage/errors';\nimport { ReaderFactory } from './types';\nimport path from 'path';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\n\nconst isInRange = (num: number, [start, end]: [number, number]) => {\n return num >= start && num <= end;\n};\n\nconst parsePortRange = (port: string): [number, number] => {\n const isRange = port.includes('-');\n if (isRange) {\n const range = port\n .split('-')\n .map(v => parseInt(v, 10))\n .filter(Boolean) as [number, number];\n if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`);\n const [start, end] = range;\n if (start <= 0 || end <= 0 || start > end)\n throw new Error(`Port range is not valid: [${start}, ${end}]`);\n return range;\n }\n const parsedPort = parseInt(port, 10);\n return [parsedPort, parsedPort];\n};\n\nconst parsePortPredicate = (port: string | undefined) => {\n if (port) {\n const range = 
parsePortRange(port);\n return (url: URL) => {\n if (url.port) return isInRange(parseInt(url.port, 10), range);\n\n if (url.protocol === 'http:') return isInRange(80, range);\n if (url.protocol === 'https:') return isInRange(443, range);\n return false;\n };\n }\n return (url: URL) => !url.port;\n};\n\n/**\n * A {@link @backstage/backend-plugin-api#UrlReaderService} that does a plain fetch of the URL.\n *\n * @public\n */\nexport class FetchUrlReader implements UrlReaderService {\n /**\n * The factory creates a single reader that will be used for reading any URL that's listed\n * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing\n * targets to allow, containing the following fields:\n *\n * `host`:\n * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.\n * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.\n *\n * `paths`:\n * An optional list of paths which are allowed. If the list is omitted all paths are allowed.\n */\n static factory: ReaderFactory = ({ config }) => {\n const predicates =\n config\n .getOptionalConfigArray('backend.reading.allow')\n ?.map(allowConfig => {\n const paths = allowConfig.getOptionalStringArray('paths');\n const checkPath = paths\n ? (url: URL) => {\n const targetPath = path.posix.normalize(url.pathname);\n return paths.some(allowedPath =>\n targetPath.startsWith(allowedPath),\n );\n }\n : (_url: URL) => true;\n const host = allowConfig.getString('host');\n const [hostname, port] = host.split(':');\n\n const checkPort = parsePortPredicate(port);\n\n if (hostname.startsWith('*.')) {\n const suffix = hostname.slice(1);\n return (url: URL) =>\n url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);\n }\n return (url: URL) =>\n url.hostname === hostname && checkPath(url) && checkPort(url);\n }) ?? 
[];\n\n const reader = new FetchUrlReader();\n const predicate = (url: URL) => predicates.some(p => p(url));\n return [{ reader, predicate }];\n };\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n let response: Response;\n try {\n response = await fetch(url, {\n headers: {\n ...(options?.etag && { 'If-None-Match': options.etag }),\n ...(options?.lastModifiedAfter && {\n 'If-Modified-Since': options.lastModifiedAfter.toUTCString(),\n }),\n ...(options?.token && { Authorization: `Bearer ${options.token}` }),\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n signal: options?.signal as any,\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n const message = `could not read ${url}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(): Promise<UrlReaderServiceReadTreeResponse> {\n throw new Error('FetchUrlReader does not implement readTree');\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { pathname } = new URL(url);\n\n if (pathname.match(/[*?]/)) {\n throw new Error('Unsupported search pattern URL');\n }\n\n try {\n const data = await this.readUrl(url, 
options);\n\n return {\n files: [\n {\n url: url,\n content: data.buffer,\n lastModifiedAt: data.lastModifiedAt,\n },\n ],\n etag: data.etag ?? '',\n };\n } catch (error) {\n assertError(error);\n if (error.name === 'NotFoundError') {\n return {\n files: [],\n etag: '',\n };\n }\n throw error;\n }\n }\n\n toString() {\n return 'fetch{}';\n }\n}\n"],"names":["path","NotModifiedError","ReadUrlResponseFactory","NotFoundError","assertError"],"mappings":";;;;;;;;;;AAiCA,MAAM,YAAY,CAAC,GAAA,EAAa,CAAC,KAAA,EAAO,GAAG,CAAA,KAAwB;AACjE,EAAA,OAAO,GAAA,IAAO,SAAS,GAAA,IAAO,GAAA;AAChC,CAAA;AAEA,MAAM,cAAA,GAAiB,CAAC,IAAA,KAAmC;AACzD,EAAA,MAAM,OAAA,GAAU,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA;AACjC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,KAAA,GAAQ,IAAA,CACX,KAAA,CAAM,GAAG,CAAA,CACT,GAAA,CAAI,CAAA,CAAA,KAAK,QAAA,CAAS,CAAA,EAAG,EAAE,CAAC,CAAA,CACxB,OAAO,OAAO,CAAA;AACjB,IAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG,MAAM,IAAI,KAAA,CAAM,CAAA,yBAAA,EAA4B,IAAI,CAAA,CAAE,CAAA;AAC1E,IAAA,MAAM,CAAC,KAAA,EAAO,GAAG,CAAA,GAAI,KAAA;AACrB,IAAA,IAAI,KAAA,IAAS,CAAA,IAAK,GAAA,IAAO,CAAA,IAAK,KAAA,GAAQ,GAAA;AACpC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,KAAK,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,CAAG,CAAA;AAC/D,IAAA,OAAO,KAAA;AAAA,EACT;AACA,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,IAAA,EAAM,EAAE,CAAA;AACpC,EAAA,OAAO,CAAC,YAAY,UAAU,CAAA;AAChC,CAAA;AAEA,MAAM,kBAAA,GAAqB,CAAC,IAAA,KAA6B;AACvD,EAAA,IAAI,IAAA,EAAM;AACR,IAAA,MAAM,KAAA,GAAQ,eAAe,IAAI,CAAA;AACjC,IAAA,OAAO,CAAC,GAAA,KAAa;AACnB,MAAA,IAAI,GAAA,CAAI,MAAM,OAAO,SAAA,CAAU,SAAS,GAAA,CAAI,IAAA,EAAM,EAAE,CAAA,EAAG,KAAK,CAAA;AAE5D,MAAA,IAAI,IAAI,QAAA,KAAa,OAAA,EAAS,OAAO,SAAA,CAAU,IAAI,KAAK,CAAA;AACxD,MAAA,IAAI,IAAI,QAAA,KAAa,QAAA,EAAU,OAAO,SAAA,CAAU,KAAK,KAAK,CAAA;AAC1D,MAAA,OAAO,KAAA;AAAA,IACT,CAAA;AAAA,EACF;AACA,EAAA,OAAO,CAAC,GAAA,KAAa,CAAC,GAAA,CAAI,IAAA;AAC5B,CAAA;AAOO,MAAM,cAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAatD,OAAO,OAAA,GAAyB,CAAC,EAAE,QAAO,KAAM;AAC9C,IAAA,MAAM,aACJ,MAAA,CACG,sBAAA,CAAuB,uBAAuB,CAAA,EAC7C,IAAI,CAAA,WAAA,KAAe;AACnB,MAAA,MAAM,KAAA,G
AAQ,WAAA,CAAY,sBAAA,CAAuB,OAAO,CAAA;AACxD,MAAA,MAAM,SAAA,GAAY,KAAA,GACd,CAAC,GAAA,KAAa;AACZ,QAAA,MAAM,UAAA,GAAaA,6BAAA,CAAK,KAAA,CAAM,SAAA,CAAU,IAAI,QAAQ,CAAA;AACpD,QAAA,OAAO,KAAA,CAAM,IAAA;AAAA,UAAK,CAAA,WAAA,KAChB,UAAA,CAAW,UAAA,CAAW,WAAW;AAAA,SACnC;AAAA,MACF,CAAA,GACA,CAAC,IAAA,KAAc,IAAA;AACnB,MAAA,MAAM,IAAA,GAAO,WAAA,CAAY,SAAA,CAAU,MAAM,CAAA;AACzC,MAAA,MAAM,CAAC,QAAA,EAAU,IAAI,CAAA,GAAI,IAAA,CAAK,MAAM,GAAG,CAAA;AAEvC,MAAA,MAAM,SAAA,GAAY,mBAAmB,IAAI,CAAA;AAEzC,MAAA,IAAI,QAAA,CAAS,UAAA,CAAW,IAAI,CAAA,EAAG;AAC7B,QAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA;AAC/B,QAAA,OAAO,CAAC,GAAA,KACN,GAAA,CAAI,QAAA,CAAS,QAAA,CAAS,MAAM,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,MACpE;AACA,MAAA,OAAO,CAAC,QACN,GAAA,CAAI,QAAA,KAAa,YAAY,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,IAChE,CAAC,KAAK,EAAC;AAEX,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAClC,IAAA,MAAM,SAAA,GAAY,CAAC,GAAA,KAAa,UAAA,CAAW,KAAK,CAAA,CAAA,KAAK,CAAA,CAAE,GAAG,CAAC,CAAA;AAC3D,IAAA,OAAO,CAAC,EAAE,MAAA,EAAQ,SAAA,EAAW,CAAA;AAAA,EAC/B,CAAA;AAAA,EAEA,MAAM,KAAK,GAAA,EAA8B;AACvC,IAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAA,EAAO;AAAA,EACzB;AAAA,EAEA,MAAM,OAAA,CACJ,GAAA,EACA,OAAA,EAC0C;AAC1C,IAAA,IAAI,QAAA;AACJ,IAAA,IAAI;AACF,MAAA,QAAA,GAAW,MAAM,MAAM,GAAA,EAAK;AAAA,QAC1B,OAAA,EAAS;AAAA,UACP,GAAI,OAAA,EAAS,IAAA,IAAQ,EAAE,eAAA,EAAiB,QAAQ,IAAA,EAAK;AAAA,UACrD,GAAI,SAAS,iBAAA,IAAqB;AAAA,YAChC,mBAAA,EAAqB,OAAA,CAAQ,iBAAA,CAAkB,WAAA;AAAY,WAC7D;AAAA,UACA,GAAI,SAAS,KAAA,IAAS,EAAE,eAAe,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAA;AAAG,SACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOA,QAAQ,OAAA,EAAS;AAAA,OAClB,CAAA;AAAA,IACH,SAAS,CAAA,EAAG;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,eAAA,EAAkB,GAAG,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AAAA,IAC/C;AAEA,IAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,MAAA,MAAM,IAAIC,uBAAA,EAAiB;AAAA,IAC7B;AAEA,IAAA,IAAI,SAAS,EAAA,EAAI;AACf,MAAA,OAAOC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA,IACrD;AAEA,IAAA,MAAM,OAAA,GAAU,kBAAkB,GAAG,CAAA,EAAA,EAAK,SAAS,MAAM,CAAA,CAAA,EAAI,SAAS,UAA
U,CAAA,CAAA;AAChF,IAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,MAAA,MAAM,IAAIC,qBAAc,OAAO,CAAA;AAAA,IACjC;AACA,IAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,EACzB;AAAA,EAEA,MAAM,QAAA,GAAsD;AAC1D,IAAA,MAAM,IAAI,MAAM,4CAA4C,CAAA;AAAA,EAC9D;AAAA,EAEA,MAAM,MAAA,CACJ,GAAA,EACA,OAAA,EACyC;AACzC,IAAA,MAAM,EAAE,QAAA,EAAS,GAAI,IAAI,IAAI,GAAG,CAAA;AAEhC,IAAA,IAAI,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA,EAAG;AAC1B,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAA,CAAK,OAAA,CAAQ,KAAK,OAAO,CAAA;AAE5C,MAAA,OAAO;AAAA,QACL,KAAA,EAAO;AAAA,UACL;AAAA,YACE,GAAA;AAAA,YACA,SAAS,IAAA,CAAK,MAAA;AAAA,YACd,gBAAgB,IAAA,CAAK;AAAA;AACvB,SACF;AAAA,QACA,IAAA,EAAM,KAAK,IAAA,IAAQ;AAAA,OACrB;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAAC,kBAAA,CAAY,KAAK,CAAA;AACjB,MAAA,IAAI,KAAA,CAAM,SAAS,eAAA,EAAiB;AAClC,QAAA,OAAO;AAAA,UACL,OAAO,EAAC;AAAA,UACR,IAAA,EAAM;AAAA,SACR;AAAA,MACF;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,QAAA,GAAW;AACT,IAAA,OAAO,SAAA;AAAA,EACT;AACF;;;;"}
1
+ {"version":3,"file":"FetchUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/FetchUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n assertError,\n NotFoundError,\n NotModifiedError,\n} from '@backstage/errors';\nimport { ReaderFactory } from './types';\nimport path from 'path';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\nimport { Config } from '@backstage/config';\n\nconst REDIRECT_STATUS_CODES = [301, 302, 307, 308];\nconst MAX_REDIRECTS = 5;\n\nconst isInRange = (num: number, [start, end]: [number, number]) => {\n return num >= start && num <= end;\n};\n\nconst parsePortRange = (port: string): [number, number] => {\n const isRange = port.includes('-');\n if (isRange) {\n const range = port\n .split('-')\n .map(v => parseInt(v, 10))\n .filter(Boolean) as [number, number];\n if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`);\n const [start, end] = range;\n if (start <= 0 || end <= 0 || start > end)\n throw new Error(`Port range is not valid: [${start}, ${end}]`);\n return range;\n }\n const parsedPort = parseInt(port, 10);\n return [parsedPort, 
parsedPort];\n};\n\nconst parsePortPredicate = (port: string | undefined) => {\n if (port) {\n const range = parsePortRange(port);\n return (url: URL) => {\n if (url.port) return isInRange(parseInt(url.port, 10), range);\n\n if (url.protocol === 'http:') return isInRange(80, range);\n if (url.protocol === 'https:') return isInRange(443, range);\n return false;\n };\n }\n return (url: URL) => !url.port;\n};\n\nfunction predicateFromConfig(config: Config): (url: URL) => boolean {\n const allow = config\n .getOptionalConfigArray('backend.reading.allow')\n ?.map(allowConfig => {\n const paths = allowConfig.getOptionalStringArray('paths');\n const checkPath = paths\n ? (url: URL) => {\n const targetPath = path.posix.normalize(url.pathname);\n return paths.some(allowedPath =>\n targetPath.startsWith(allowedPath),\n );\n }\n : (_url: URL) => true;\n const host = allowConfig.getString('host');\n const [hostname, port] = host.split(':');\n\n const checkPort = parsePortPredicate(port);\n\n if (hostname.startsWith('*.')) {\n const suffix = hostname.slice(1);\n return (url: URL) =>\n url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);\n }\n\n return (url: URL) =>\n url.hostname === hostname && checkPath(url) && checkPort(url);\n });\n\n return allow?.length ? url => allow.some(p => p(url)) : () => false;\n}\n\n/**\n * A {@link @backstage/backend-plugin-api#UrlReaderService} that does a plain fetch of the URL.\n *\n * @public\n */\nexport class FetchUrlReader implements UrlReaderService {\n /**\n * The factory creates a single reader that will be used for reading any URL that's listed\n * in configuration at `backend.reading.allow`. 
The allow list contains a list of objects describing\n * targets to allow, containing the following fields:\n *\n * `host`:\n * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.\n * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.\n *\n * `paths`:\n * An optional list of paths which are allowed. If the list is omitted all paths are allowed.\n */\n static factory: ReaderFactory = ({ config }) => {\n const predicate = predicateFromConfig(config);\n const reader = new FetchUrlReader({ predicate });\n return [{ reader, predicate }];\n };\n\n static fromConfig(config: Config): FetchUrlReader {\n return new FetchUrlReader({ predicate: predicateFromConfig(config) });\n }\n\n readonly #predicate: (url: URL) => boolean;\n\n private constructor(options: { predicate: (url: URL) => boolean }) {\n this.#predicate = options.predicate;\n }\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n let currentUrl = url;\n\n for (\n let redirectCount = 0;\n redirectCount < MAX_REDIRECTS;\n redirectCount += 1\n ) {\n // Validate URL against predicate if configured\n const parsedUrl = new URL(currentUrl);\n if (!this.#predicate(parsedUrl)) {\n throw new Error(\n `URL not allowed by backend.reading.allow configuration: ${currentUrl}`,\n );\n }\n\n let response: Response;\n try {\n response = await fetch(currentUrl, {\n headers: {\n ...(options?.etag && { 'If-None-Match': options.etag }),\n ...(options?.lastModifiedAfter && {\n 'If-Modified-Since': options.lastModifiedAfter.toUTCString(),\n }),\n ...(options?.token && { Authorization: `Bearer ${options.token}` }),\n },\n // Handle redirects manually to validate targets against the allowlist\n redirect: 'manual',\n // TODO(freben): The signal cast is there because pre-3.x 
versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n signal: options?.signal as any,\n });\n } catch (e) {\n throw new Error(`Unable to read ${currentUrl}, ${e}`);\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n const location = response.headers.get('location');\n if (!REDIRECT_STATUS_CODES.includes(response.status) || !location) {\n const message = `could not read ${currentUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n // Follow the redirect\n currentUrl = new URL(location, currentUrl).toString();\n }\n\n throw new Error(\n `Too many redirects (max ${MAX_REDIRECTS}) when reading ${url}`,\n );\n }\n\n async readTree(): Promise<UrlReaderServiceReadTreeResponse> {\n throw new Error('FetchUrlReader does not implement readTree');\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { pathname } = new URL(url);\n\n if (pathname.match(/[*?]/)) {\n throw new Error('Unsupported search pattern URL');\n }\n\n try {\n const data = await this.readUrl(url, options);\n\n return {\n files: [\n {\n url: url,\n content: data.buffer,\n lastModifiedAt: data.lastModifiedAt,\n },\n ],\n etag: data.etag ?? 
'',\n };\n } catch (error) {\n assertError(error);\n if (error.name === 'NotFoundError') {\n return {\n files: [],\n etag: '',\n };\n }\n throw error;\n }\n }\n\n toString() {\n return 'fetch{}';\n }\n}\n"],"names":["path","ReadUrlResponseFactory","NotModifiedError","NotFoundError","assertError"],"mappings":";;;;;;;;;;AAkCA,MAAM,qBAAA,GAAwB,CAAC,GAAA,EAAK,GAAA,EAAK,KAAK,GAAG,CAAA;AACjD,MAAM,aAAA,GAAgB,CAAA;AAEtB,MAAM,YAAY,CAAC,GAAA,EAAa,CAAC,KAAA,EAAO,GAAG,CAAA,KAAwB;AACjE,EAAA,OAAO,GAAA,IAAO,SAAS,GAAA,IAAO,GAAA;AAChC,CAAA;AAEA,MAAM,cAAA,GAAiB,CAAC,IAAA,KAAmC;AACzD,EAAA,MAAM,OAAA,GAAU,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA;AACjC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,KAAA,GAAQ,IAAA,CACX,KAAA,CAAM,GAAG,CAAA,CACT,GAAA,CAAI,CAAA,CAAA,KAAK,QAAA,CAAS,CAAA,EAAG,EAAE,CAAC,CAAA,CACxB,OAAO,OAAO,CAAA;AACjB,IAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG,MAAM,IAAI,KAAA,CAAM,CAAA,yBAAA,EAA4B,IAAI,CAAA,CAAE,CAAA;AAC1E,IAAA,MAAM,CAAC,KAAA,EAAO,GAAG,CAAA,GAAI,KAAA;AACrB,IAAA,IAAI,KAAA,IAAS,CAAA,IAAK,GAAA,IAAO,CAAA,IAAK,KAAA,GAAQ,GAAA;AACpC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,KAAK,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,CAAG,CAAA;AAC/D,IAAA,OAAO,KAAA;AAAA,EACT;AACA,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,IAAA,EAAM,EAAE,CAAA;AACpC,EAAA,OAAO,CAAC,YAAY,UAAU,CAAA;AAChC,CAAA;AAEA,MAAM,kBAAA,GAAqB,CAAC,IAAA,KAA6B;AACvD,EAAA,IAAI,IAAA,EAAM;AACR,IAAA,MAAM,KAAA,GAAQ,eAAe,IAAI,CAAA;AACjC,IAAA,OAAO,CAAC,GAAA,KAAa;AACnB,MAAA,IAAI,GAAA,CAAI,MAAM,OAAO,SAAA,CAAU,SAAS,GAAA,CAAI,IAAA,EAAM,EAAE,CAAA,EAAG,KAAK,CAAA;AAE5D,MAAA,IAAI,IAAI,QAAA,KAAa,OAAA,EAAS,OAAO,SAAA,CAAU,IAAI,KAAK,CAAA;AACxD,MAAA,IAAI,IAAI,QAAA,KAAa,QAAA,EAAU,OAAO,SAAA,CAAU,KAAK,KAAK,CAAA;AAC1D,MAAA,OAAO,KAAA;AAAA,IACT,CAAA;AAAA,EACF;AACA,EAAA,OAAO,CAAC,GAAA,KAAa,CAAC,GAAA,CAAI,IAAA;AAC5B,CAAA;AAEA,SAAS,oBAAoB,MAAA,EAAuC;AAClE,EAAA,MAAM,QAAQ,MAAA,CACX,sBAAA,CAAuB,uBAAuB,CAAA,EAC7C,IAAI,CAAA,WAAA,KAAe;AACnB,IAAA,MAAM,KAAA,GAAQ,WAAA,CAAY,sBAAA,CAAuB,OAAO,CAAA;AACxD,IAAA,MAAM,SAAA,GAAY,KAAA,GACd,CAAC,GAAA,KAAa;AACZ,MAAA,MAAM,UAAA,GAAaA,6BAAA,CAAK,KAAA,CAAM,SAAA,CAAU,IAAI,QAAQ,
CAAA;AACpD,MAAA,OAAO,KAAA,CAAM,IAAA;AAAA,QAAK,CAAA,WAAA,KAChB,UAAA,CAAW,UAAA,CAAW,WAAW;AAAA,OACnC;AAAA,IACF,CAAA,GACA,CAAC,IAAA,KAAc,IAAA;AACnB,IAAA,MAAM,IAAA,GAAO,WAAA,CAAY,SAAA,CAAU,MAAM,CAAA;AACzC,IAAA,MAAM,CAAC,QAAA,EAAU,IAAI,CAAA,GAAI,IAAA,CAAK,MAAM,GAAG,CAAA;AAEvC,IAAA,MAAM,SAAA,GAAY,mBAAmB,IAAI,CAAA;AAEzC,IAAA,IAAI,QAAA,CAAS,UAAA,CAAW,IAAI,CAAA,EAAG;AAC7B,MAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA;AAC/B,MAAA,OAAO,CAAC,GAAA,KACN,GAAA,CAAI,QAAA,CAAS,QAAA,CAAS,MAAM,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,IACpE;AAEA,IAAA,OAAO,CAAC,QACN,GAAA,CAAI,QAAA,KAAa,YAAY,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,EAChE,CAAC,CAAA;AAEH,EAAA,OAAO,KAAA,EAAO,MAAA,GAAS,CAAA,GAAA,KAAO,KAAA,CAAM,IAAA,CAAK,OAAK,CAAA,CAAE,GAAG,CAAC,CAAA,GAAI,MAAM,KAAA;AAChE;AAOO,MAAM,cAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAatD,OAAO,OAAA,GAAyB,CAAC,EAAE,QAAO,KAAM;AAC9C,IAAA,MAAM,SAAA,GAAY,oBAAoB,MAAM,CAAA;AAC5C,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,CAAe,EAAE,WAAW,CAAA;AAC/C,IAAA,OAAO,CAAC,EAAE,MAAA,EAAQ,SAAA,EAAW,CAAA;AAAA,EAC/B,CAAA;AAAA,EAEA,OAAO,WAAW,MAAA,EAAgC;AAChD,IAAA,OAAO,IAAI,cAAA,CAAe,EAAE,WAAW,mBAAA,CAAoB,MAAM,GAAG,CAAA;AAAA,EACtE;AAAA,EAES,UAAA;AAAA,EAED,YAAY,OAAA,EAA+C;AACjE,IAAA,IAAA,CAAK,aAAa,OAAA,CAAQ,SAAA;AAAA,EAC5B;AAAA,EAEA,MAAM,KAAK,GAAA,EAA8B;AACvC,IAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAA,EAAO;AAAA,EACzB;AAAA,EAEA,MAAM,OAAA,CACJ,GAAA,EACA,OAAA,EAC0C;AAC1C,IAAA,IAAI,UAAA,GAAa,GAAA;AAEjB,IAAA,KAAA,IACM,aAAA,GAAgB,CAAA,EACpB,aAAA,GAAgB,aAAA,EAChB,iBAAiB,CAAA,EACjB;AAEA,MAAA,MAAM,SAAA,GAAY,IAAI,GAAA,CAAI,UAAU,CAAA;AACpC,MAAA,IAAI,CAAC,IAAA,CAAK,UAAA,CAAW,SAAS,CAAA,EAAG;AAC/B,QAAA,MAAM,IAAI,KAAA;AAAA,UACR,2DAA2D,UAAU,CAAA;AAAA,SACvE;AAAA,MACF;AAEA,MAAA,IAAI,QAAA;AACJ,MAAA,IAAI;AACF,QAAA,QAAA,GAAW,MAAM,MAAM,UAAA,EAAY;AAAA,UACjC,OAAA,EAAS;AAAA,YACP,GAAI,OAAA,EAAS,IAAA,IAAQ,EAAE,eAAA,EAAiB,QAAQ,IAAA,EAAK;AAAA,YACrD,GAAI,SAAS,iBAAA,IAAqB;AAAA,cAChC,mBAAA,EAAqB,OAAA,CAAQ,iBAAA,CAAkB,WAAA;
AAAY,aAC7D;AAAA,YACA,GAAI,SAAS,KAAA,IAAS,EAAE,eAAe,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAA;AAAG,WACnE;AAAA;AAAA,UAEA,QAAA,EAAU,QAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAOV,QAAQ,OAAA,EAAS;AAAA,SAClB,CAAA;AAAA,MACH,SAAS,CAAA,EAAG;AACV,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,eAAA,EAAkB,UAAU,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AAAA,MACtD;AAEA,MAAA,IAAI,SAAS,EAAA,EAAI;AACf,QAAA,OAAOC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA,MACrD;AAEA,MAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,QAAA,MAAM,IAAIC,uBAAA,EAAiB;AAAA,MAC7B;AAEA,MAAA,MAAM,QAAA,GAAW,QAAA,CAAS,OAAA,CAAQ,GAAA,CAAI,UAAU,CAAA;AAChD,MAAA,IAAI,CAAC,qBAAA,CAAsB,QAAA,CAAS,SAAS,MAAM,CAAA,IAAK,CAAC,QAAA,EAAU;AACjE,QAAA,MAAM,OAAA,GAAU,kBAAkB,UAAU,CAAA,EAAA,EAAK,SAAS,MAAM,CAAA,CAAA,EAAI,SAAS,UAAU,CAAA,CAAA;AACvF,QAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,UAAA,MAAM,IAAIC,qBAAc,OAAO,CAAA;AAAA,QACjC;AACA,QAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,MACzB;AAGA,MAAA,UAAA,GAAa,IAAI,GAAA,CAAI,QAAA,EAAU,UAAU,EAAE,QAAA,EAAS;AAAA,IACtD;AAEA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,wBAAA,EAA2B,aAAa,CAAA,eAAA,EAAkB,GAAG,CAAA;AAAA,KAC/D;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAAsD;AAC1D,IAAA,MAAM,IAAI,MAAM,4CAA4C,CAAA;AAAA,EAC9D;AAAA,EAEA,MAAM,MAAA,CACJ,GAAA,EACA,OAAA,EACyC;AACzC,IAAA,MAAM,EAAE,QAAA,EAAS,GAAI,IAAI,IAAI,GAAG,CAAA;AAEhC,IAAA,IAAI,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA,EAAG;AAC1B,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAA,CAAK,OAAA,CAAQ,KAAK,OAAO,CAAA;AAE5C,MAAA,OAAO;AAAA,QACL,KAAA,EAAO;AAAA,UACL;AAAA,YACE,GAAA;AAAA,YACA,SAAS,IAAA,CAAK,MAAA;AAAA,YACd,gBAAgB,IAAA,CAAK;AAAA;AACvB,SACF;AAAA,QACA,IAAA,EAAM,KAAK,IAAA,IAAQ;AAAA,OACrB;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAAC,kBAAA,CAAY,KAAK,CAAA;AACjB,MAAA,IAAI,KAAA,CAAM,SAAS,eAAA,EAAiB;AAClC,QAAA,OAAO;AAAA,UACL,OAAO,EAAC;AAAA,UACR,IAAA,EAAM;AAAA,SACR;AAAA,MACF;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,QAAA,GAAW;AACT,IAAA,OAAO,SAAA;AAAA,EACT;AACF;;;;"}
@@ -1,5 +1,6 @@
1
1
  'use strict';
2
2
 
3
+ var backendPluginApi = require('@backstage/backend-plugin-api');
3
4
  var concatStream = require('concat-stream');
4
5
  var platformPath = require('path');
5
6
  var getRawBody = require('raw-body');
@@ -65,7 +66,7 @@ class ReadableArrayResponse {
65
66
  const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
66
67
  for (let i = 0; i < this.stream.length; i++) {
67
68
  if (!this.stream[i].path.endsWith("/")) {
68
- const filePath = platformPath__default.default.join(dir, this.stream[i].path);
69
+ const filePath = backendPluginApi.resolveSafeChildPath(dir, this.stream[i].path);
69
70
  await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
70
71
  await pipeline(this.stream[i].data, fs__default.default.createWriteStream(filePath));
71
72
  }
@@ -1 +1 @@
1
- {"version":3,"file":"ReadableArrayResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/ReadableArrayResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport platformPath, { dirname } from 'path';\nimport getRawBody from 'raw-body';\nimport fs from 'fs-extra';\nimport { promisify } from 'util';\nimport tar from 'tar';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport { FromReadableArrayOptions } from '../types';\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a array of Readable objects into a tree response reader.\n */\nexport class ReadableArrayResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n\n constructor(\n private readonly stream: FromReadableArrayOptions,\n private readonly workDir: string,\n public readonly etag: string,\n ) {\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = 
Array<UrlReaderServiceReadTreeResponseFile>();\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n files.push({\n path: this.stream[i].path,\n content: () => getRawBody(this.stream[i].data),\n lastModifiedAt: this.stream[i]?.lastModifiedAt,\n });\n }\n }\n\n return files;\n }\n\n async archive(): Promise<NodeJS.ReadableStream> {\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n const filePath = platformPath.join(dir, this.stream[i].path);\n await fs.mkdir(dirname(filePath), { recursive: true });\n await pipeline(this.stream[i].data, fs.createWriteStream(filePath));\n }\n }\n\n return dir;\n 
}\n}\n"],"names":["promisify","pipelineCb","getRawBody","tar","concatStream","Readable","fs","platformPath","dirname"],"mappings":";;;;;;;;;;;;;;;;;;AA8BA,MAAM,QAAA,GAAWA,eAAUC,eAAU,CAAA;AAK9B,MAAM,qBAAA,CAAkE;AAAA,EAG7E,WAAA,CACmB,MAAA,EACA,OAAA,EACD,IAAA,EAChB;AAHiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACD,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAEhB,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EARQ,IAAA,GAAO,KAAA;AAAA;AAAA,EAWP,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAE1D,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,CAAE,IAAA;AAAA,UACrB,SAAS,MAAMC,2BAAA,CAAW,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAAA,UAC7C,cAAA,EAAgB,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,EAAG;AAAA,SACjC,CAAA;AAAA,MACH;AAAA,IACF;AAEA,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA0C;AAC9C,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCC,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOC,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAEjE,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,MAAM,QAAA,GAAWA,8BAAa,IAAA,CAAK,GAAA,EAAK,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AA
C3D,QAAA,MAAMD,mBAAA,CAAG,MAAME,oBAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACrD,QAAA,MAAM,QAAA,CAAS,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,EAAMF,mBAAA,CAAG,iBAAA,CAAkB,QAAQ,CAAC,CAAA;AAAA,MACpE;AAAA,IACF;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
1
+ {"version":3,"file":"ReadableArrayResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/ReadableArrayResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n resolveSafeChildPath,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport platformPath, { dirname } from 'path';\nimport getRawBody from 'raw-body';\nimport fs from 'fs-extra';\nimport { promisify } from 'util';\nimport tar from 'tar';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport { FromReadableArrayOptions } from '../types';\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a array of Readable objects into a tree response reader.\n */\nexport class ReadableArrayResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n\n constructor(\n private readonly stream: FromReadableArrayOptions,\n private readonly workDir: string,\n public readonly etag: string,\n ) {\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = 
Array<UrlReaderServiceReadTreeResponseFile>();\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n files.push({\n path: this.stream[i].path,\n content: () => getRawBody(this.stream[i].data),\n lastModifiedAt: this.stream[i]?.lastModifiedAt,\n });\n }\n }\n\n return files;\n }\n\n async archive(): Promise<NodeJS.ReadableStream> {\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n const filePath = resolveSafeChildPath(dir, this.stream[i].path);\n await fs.mkdir(dirname(filePath), { recursive: true });\n await pipeline(this.stream[i].data, fs.createWriteStream(filePath));\n }\n }\n\n return dir;\n 
}\n}\n"],"names":["promisify","pipelineCb","getRawBody","tar","concatStream","Readable","fs","platformPath","resolveSafeChildPath","dirname"],"mappings":";;;;;;;;;;;;;;;;;;;AA+BA,MAAM,QAAA,GAAWA,eAAUC,eAAU,CAAA;AAK9B,MAAM,qBAAA,CAAkE;AAAA,EAG7E,WAAA,CACmB,MAAA,EACA,OAAA,EACD,IAAA,EAChB;AAHiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACD,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AAEhB,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EARQ,IAAA,GAAO,KAAA;AAAA;AAAA,EAWP,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAE1D,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,CAAE,IAAA;AAAA,UACrB,SAAS,MAAMC,2BAAA,CAAW,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAAA,UAC7C,cAAA,EAAgB,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,EAAG;AAAA,SACjC,CAAA;AAAA,MACH;AAAA,IACF;AAEA,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA0C;AAC9C,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCC,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOC,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAEjE,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,MAAM,WAAWC,qCAAA,CAAqB,GAAA,EAAK,KAAK,MAAA,CAAO,CAAC,EA
AE,IAAI,CAAA;AAC9D,QAAA,MAAMF,mBAAA,CAAG,MAAMG,oBAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACrD,QAAA,MAAM,QAAA,CAAS,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,EAAMH,mBAAA,CAAG,iBAAA,CAAkB,QAAQ,CAAC,CAAA;AAAA,MACpE;AAAA,IACF;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
@@ -1,5 +1,6 @@
1
1
  'use strict';
2
2
 
3
+ var backendPluginApi = require('@backstage/backend-plugin-api');
3
4
  var concatStream = require('concat-stream');
4
5
  var fs = require('fs-extra');
5
6
  var platformPath = require('path');
@@ -115,6 +116,17 @@ class TarArchiveResponse {
115
116
  if (filterError) {
116
117
  return false;
117
118
  }
119
+ const entry = stat;
120
+ if ((entry.type === "SymbolicLink" || entry.type === "Link") && entry.linkpath) {
121
+ const strippedPath = path.split("/").slice(strip).join("/");
122
+ const linkDir = platformPath__default.default.dirname(
123
+ platformPath__default.default.join(dir, strippedPath)
124
+ );
125
+ const targetPath = platformPath__default.default.resolve(linkDir, entry.linkpath);
126
+ if (!backendPluginApi.isChildPath(dir, targetPath)) {
127
+ return false;
128
+ }
129
+ }
118
130
  const relativePath = this.stripFirstDirectory ? util$1.stripFirstDirectoryFromPath(path) : path;
119
131
  if (this.subPath && !relativePath.startsWith(this.subPath)) {
120
132
  return false;
@@ -1 +1 @@
1
- {"version":3,"file":"TarArchiveResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/TarArchiveResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport fs from 'fs-extra';\nimport platformPath from 'path';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport tar, { Parse, ParseStream, ReadEntry } from 'tar';\nimport { promisify } from 'util';\nimport { stripFirstDirectoryFromPath } from './util';\n\n// Tar types for `Parse` is not a proper constructor, but it should be\nconst TarParseStream = Parse as unknown as { new (): ParseStream };\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a tar archive stream into a tree response reader.\n */\nexport class TarArchiveResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n\n constructor(\n private readonly stream: Readable,\n private readonly subPath: string,\n private readonly workDir: string,\n public readonly etag: string,\n private readonly filter?: (path: string, info: { size: number }) => boolean,\n private readonly stripFirstDirectory: boolean = true,\n ) {\n if (subPath) {\n if (!subPath.endsWith('/')) {\n this.subPath += '/';\n 
}\n if (subPath.startsWith('/')) {\n throw new TypeError(\n `TarArchiveResponse subPath must not start with a /, got '${subPath}'`,\n );\n }\n }\n\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n const parser = new TarParseStream();\n\n parser.on('entry', (entry: ReadEntry & Readable) => {\n if (entry.type === 'Directory') {\n entry.resume();\n return;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? stripFirstDirectoryFromPath(entry.path)\n : entry.path;\n\n if (this.subPath) {\n if (!relativePath.startsWith(this.subPath)) {\n entry.resume();\n return;\n }\n }\n\n const path = relativePath.slice(this.subPath.length);\n if (this.filter) {\n if (!this.filter(path, { size: entry.remain })) {\n entry.resume();\n return;\n }\n }\n\n const content = new Promise<Buffer>(async resolve => {\n await pipeline(entry, concatStream(resolve));\n });\n\n files.push({\n path,\n content: () => content,\n });\n\n entry.resume();\n });\n\n await pipeline(this.stream, parser);\n\n return files;\n }\n\n async archive(): Promise<Readable> {\n if (!this.subPath) {\n this.onlyOnce();\n\n return this.stream;\n }\n\n // TODO(Rugvip): method for repacking a tar with a subpath is to simply extract into a\n // tmp dir and recreate the archive. 
Would be nicer to stream things instead.\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n // Equivalent of tar --strip-components=N\n // When no subPath is given, remove just 1 top level directory\n let strip = this.subPath ? this.subPath.split('/').length : 1;\n if (!this.stripFirstDirectory) {\n strip--;\n }\n\n let filterError: Error | undefined = undefined;\n await pipeline(\n this.stream,\n tar.extract({\n strip,\n cwd: dir,\n filter: (path, stat) => {\n // Filter errors will short-circuit the rest of the filtering and then throw\n if (filterError) {\n return false;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(path)\n : path;\n if (this.subPath && !relativePath.startsWith(this.subPath)) {\n return false;\n }\n if (this.filter) {\n const innerPath = path.split('/').slice(strip).join('/');\n try {\n return this.filter(innerPath, { size: stat.size });\n } catch (error) {\n filterError = error;\n return false;\n }\n }\n return true;\n },\n }),\n );\n\n if (filterError) {\n // If the dir was provided we don't want to remove it, but if it wasn't it means\n // we created a temporary directory and we should remove it.\n if (!options?.targetDir) {\n await fs.remove(dir).catch(() => {});\n }\n throw filterError;\n }\n\n return dir;\n }\n}\n"],"names":["Parse","promisify","pipelineCb","stripFirstDirectoryFromPath","concatStream","tar","Readable","fs","platformPath"],"mappings":";;;;;;;;;;;;;;;;;AA8BA,MAAM,cAAA,GAAiBA,SAAA;AAEvB,MAAM,QAAA,GAAWC,eAAUC,eAAU,CAAA;AAK9B,MAAM,kBAAA,CAA+D;AAAA,EAG1E,YACmB,MAAA,EACA,OAAA,EACA,SACD,IAAA,EACC,MAAA,EACA,sBAA+B,IAAA,EAChD;AANiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACD,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AACC,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,mBAAA,GAAA,mBAAA;AAEjB,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAI,CAAC,OAAA,CAAQ,QAAA,CAAS,GAAG,CAAA,EAAG;AAC1B,QAAA,IAAA,CAAK,OAAA,IAAW,GAAA;AAAA,MAClB;AACA,MAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,GAAG,CAAA,EAAG;AAC3B,QAAA,MAAM,IAAI,SAAA;AAAA,UACR,4DAA4D,OAAO,CAAA,CAAA;AAAA,SACrE;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAtBQ,IAAA,GAAO,KAAA;AAAA;AAAA,EAyBP,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAC1D,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAElC,IAAA,MAAA,CAAO,EAAA,CAAG,OAAA,EAAS,CAAC,KAAA,KAAgC;AAClD,MAAA,IAAI,KAAA,CAAM,SAAS,WAAA,EAAa;AAC9B,QAAA,KAAA,CAAM,MAAA,EAAO;AACb,QAAA;AAAA,MACF;AAIA,MAAA,MAAM,eAAe,IAAA,CAAK,mBAAA
,GACtBC,mCAA4B,KAAA,CAAM,IAAI,IACtC,KAAA,CAAM,IAAA;AAEV,MAAA,IAAI,KAAK,OAAA,EAAS;AAChB,QAAA,IAAI,CAAC,YAAA,CAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,IAAA,GAAO,YAAA,CAAa,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAM,CAAA;AACnD,MAAA,IAAI,KAAK,MAAA,EAAQ;AACf,QAAA,IAAI,CAAC,KAAK,MAAA,CAAO,IAAA,EAAM,EAAE,IAAA,EAAM,KAAA,CAAM,MAAA,EAAQ,CAAA,EAAG;AAC9C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACnD,QAAA,MAAM,QAAA,CAAS,KAAA,EAAOC,6BAAA,CAAa,OAAO,CAAC,CAAA;AAAA,MAC7C,CAAC,CAAA;AAED,MAAA,KAAA,CAAM,IAAA,CAAK;AAAA,QACT,IAAA;AAAA,QACA,SAAS,MAAM;AAAA,OAChB,CAAA;AAED,MAAA,KAAA,CAAM,MAAA,EAAO;AAAA,IACf,CAAC,CAAA;AAED,IAAA,MAAM,QAAA,CAAS,IAAA,CAAK,MAAA,EAAQ,MAAM,CAAA;AAElC,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA6B;AACjC,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,QAAA,EAAS;AAEd,MAAA,OAAO,IAAA,CAAK,MAAA;AAAA,IACd;AAIA,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCD,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOE,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAIjE,IAAA,IAAI,KAAA,GAAQ,KAAK,OAAA,GAAU,IAAA,CAAK,QAAQ,KAAA,CAAM,GAAG,EAAE,MAAA,GAAS,CAAA;AAC5D,IAAA,IAAI,CAAC,KAAK,mBAAA,EAAqB;AAC7B,MAAA,KAAA,EAAA;AAAA,IACF;AAEA,IAAA,IAAI,WAAA,GAAiC,MAAA;AACrC,IAAA,MAAM,QAAA;AAAA,MACJ,IAAA,CAAK,MAAA;AAAA,MACLH,qBAAI,OAAA,CAAQ;AAAA,QACV,KAAA;AAAA,QACA,GAAA,EAAK,GAAA;AAAA,QACL,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAA,KAAS;AAEtB,UAAA,IAAI,WAAA,EAAa;AACf,YAAA,OAAO,KAAA;AAAA,UACT;AAIA,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,mBAAA,GACtBF,kCAAA,C
AA4B,IAAI,CAAA,GAChC,IAAA;AACJ,UAAA,IAAI,KAAK,OAAA,IAAW,CAAC,aAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1D,YAAA,OAAO,KAAA;AAAA,UACT;AACA,UAAA,IAAI,KAAK,MAAA,EAAQ;AACf,YAAA,MAAM,SAAA,GAAY,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AACvD,YAAA,IAAI;AACF,cAAA,OAAO,KAAK,MAAA,CAAO,SAAA,EAAW,EAAE,IAAA,EAAM,IAAA,CAAK,MAAM,CAAA;AAAA,YACnD,SAAS,KAAA,EAAO;AACd,cAAA,WAAA,GAAc,KAAA;AACd,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AACA,UAAA,OAAO,IAAA;AAAA,QACT;AAAA,OACD;AAAA,KACH;AAEA,IAAA,IAAI,WAAA,EAAa;AAGf,MAAA,IAAI,CAAC,SAAS,SAAA,EAAW;AACvB,QAAA,MAAMI,mBAAA,CAAG,MAAA,CAAO,GAAG,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MACrC;AACA,MAAA,MAAM,WAAA;AAAA,IACR;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
1
+ {"version":3,"file":"TarArchiveResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/TarArchiveResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n isChildPath,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport fs from 'fs-extra';\nimport platformPath from 'path';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport tar, { FileStat, Parse, ParseStream, ReadEntry } from 'tar';\nimport { promisify } from 'util';\nimport { stripFirstDirectoryFromPath } from './util';\n\n// Tar types for `Parse` is not a proper constructor, but it should be\nconst TarParseStream = Parse as unknown as { new (): ParseStream };\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a tar archive stream into a tree response reader.\n */\nexport class TarArchiveResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n\n constructor(\n private readonly stream: Readable,\n private readonly subPath: string,\n private readonly workDir: string,\n public readonly etag: string,\n private readonly filter?: (path: string, info: { size: number }) => boolean,\n private readonly stripFirstDirectory: boolean = true,\n ) {\n if (subPath) {\n if (!subPath.endsWith('/')) {\n 
this.subPath += '/';\n }\n if (subPath.startsWith('/')) {\n throw new TypeError(\n `TarArchiveResponse subPath must not start with a /, got '${subPath}'`,\n );\n }\n }\n\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n const parser = new TarParseStream();\n\n parser.on('entry', (entry: ReadEntry & Readable) => {\n if (entry.type === 'Directory') {\n entry.resume();\n return;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? stripFirstDirectoryFromPath(entry.path)\n : entry.path;\n\n if (this.subPath) {\n if (!relativePath.startsWith(this.subPath)) {\n entry.resume();\n return;\n }\n }\n\n const path = relativePath.slice(this.subPath.length);\n if (this.filter) {\n if (!this.filter(path, { size: entry.remain })) {\n entry.resume();\n return;\n }\n }\n\n const content = new Promise<Buffer>(async resolve => {\n await pipeline(entry, concatStream(resolve));\n });\n\n files.push({\n path,\n content: () => content,\n });\n\n entry.resume();\n });\n\n await pipeline(this.stream, parser);\n\n return files;\n }\n\n async archive(): Promise<Readable> {\n if (!this.subPath) {\n this.onlyOnce();\n\n return this.stream;\n }\n\n // TODO(Rugvip): method for repacking a tar with a subpath is to simply extract into a\n // tmp dir and recreate the archive. 
Would be nicer to stream things instead.\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n // Equivalent of tar --strip-components=N\n // When no subPath is given, remove just 1 top level directory\n let strip = this.subPath ? this.subPath.split('/').length : 1;\n if (!this.stripFirstDirectory) {\n strip--;\n }\n\n let filterError: Error | undefined = undefined;\n await pipeline(\n this.stream,\n tar.extract({\n strip,\n cwd: dir,\n filter: (path, stat) => {\n // Filter errors will short-circuit the rest of the filtering and then throw\n if (filterError) {\n return false;\n }\n\n // Block symlinks/hardlinks that escape the extraction directory\n const entry = stat as FileStat & { type?: string; linkpath?: string };\n if (\n (entry.type === 'SymbolicLink' || entry.type === 'Link') &&\n entry.linkpath\n ) {\n const strippedPath = path.split('/').slice(strip).join('/');\n const linkDir = platformPath.dirname(\n platformPath.join(dir, strippedPath),\n );\n const targetPath = platformPath.resolve(linkDir, entry.linkpath);\n if (!isChildPath(dir, targetPath)) {\n return false;\n }\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(path)\n : path;\n if (this.subPath && !relativePath.startsWith(this.subPath)) {\n return false;\n }\n if (this.filter) {\n const innerPath = path.split('/').slice(strip).join('/');\n try {\n return this.filter(innerPath, { size: stat.size });\n } catch (error) {\n filterError = error;\n return false;\n }\n }\n return true;\n },\n }),\n );\n\n if (filterError) {\n // If the dir was provided we don't want to remove it, but if it wasn't it means\n // we created a temporary directory and we should remove it.\n if (!options?.targetDir) {\n await fs.remove(dir).catch(() => {});\n }\n throw filterError;\n }\n\n return dir;\n }\n}\n"],"names":["Parse","promisify","pipelineCb","stripFirstDirectoryFromPath","concatStream","tar","Readable","fs","platformPath","isChildPath"],"mappings":";;;;;;;;;;;;;;;;;;AA+BA,MAAM,cAAA,GAAiBA,SAAA;AAEvB,MAAM,QAAA,GAAWC,eAAUC,eAAU,CAAA;AAK9B,MAAM,kBAAA,CAA+D;AAAA,EAG1E,YACmB,MAAA,EACA,OAAA,EACA,SACD,IAAA,EACC,MAAA,EACA,sBAA+B,IAAA,EAChD;AANiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACA,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AACD,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA;AACC,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,mBAAA,GAAA,mBAAA;AAEjB,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAI,CAAC,OAAA,CAAQ,QAAA,CAAS,GAAG,CAAA,EAAG;AAC1B,QAAA,IAAA,CAAK,OAAA,IAAW,GAAA;AAAA,MAClB;AACA,MAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,GAAG,CAAA,EAAG;AAC3B,QAAA,MAAM,IAAI,SAAA;AAAA,UACR,4DAA4D,OAAO,CAAA,CAAA;AAAA,SACrE;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAtBQ,IAAA,GAAO,KAAA;AAAA;AAAA,EAyBP,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAC1D,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAElC,IAAA,MAAA,CAAO,EAAA,CAAG,OAAA,EAAS,CAAC,KAAA,KAAgC;AAClD,MAAA,IAAI,KAAA,CAAM,SAAS,WAAA,EAAa;AAC9B,QAAA,KAAA,CAAM,MAAA,EAAO;AACb,QAAA;AAAA,MACF;AAIA,MAAA,MAAM,eAAe,
IAAA,CAAK,mBAAA,GACtBC,mCAA4B,KAAA,CAAM,IAAI,IACtC,KAAA,CAAM,IAAA;AAEV,MAAA,IAAI,KAAK,OAAA,EAAS;AAChB,QAAA,IAAI,CAAC,YAAA,CAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,IAAA,GAAO,YAAA,CAAa,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAM,CAAA;AACnD,MAAA,IAAI,KAAK,MAAA,EAAQ;AACf,QAAA,IAAI,CAAC,KAAK,MAAA,CAAO,IAAA,EAAM,EAAE,IAAA,EAAM,KAAA,CAAM,MAAA,EAAQ,CAAA,EAAG;AAC9C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACnD,QAAA,MAAM,QAAA,CAAS,KAAA,EAAOC,6BAAA,CAAa,OAAO,CAAC,CAAA;AAAA,MAC7C,CAAC,CAAA;AAED,MAAA,KAAA,CAAM,IAAA,CAAK;AAAA,QACT,IAAA;AAAA,QACA,SAAS,MAAM;AAAA,OAChB,CAAA;AAED,MAAA,KAAA,CAAM,MAAA,EAAO;AAAA,IACf,CAAC,CAAA;AAED,IAAA,MAAM,QAAA,CAAS,IAAA,CAAK,MAAA,EAAQ,MAAM,CAAA;AAElC,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA6B;AACjC,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,QAAA,EAAS;AAEd,MAAA,OAAO,IAAA,CAAK,MAAA;AAAA,IACd;AAIA,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCD,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOE,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAIjE,IAAA,IAAI,KAAA,GAAQ,KAAK,OAAA,GAAU,IAAA,CAAK,QAAQ,KAAA,CAAM,GAAG,EAAE,MAAA,GAAS,CAAA;AAC5D,IAAA,IAAI,CAAC,KAAK,mBAAA,EAAqB;AAC7B,MAAA,KAAA,EAAA;AAAA,IACF;AAEA,IAAA,IAAI,WAAA,GAAiC,MAAA;AACrC,IAAA,MAAM,QAAA;AAAA,MACJ,IAAA,CAAK,MAAA;AAAA,MACLH,qBAAI,OAAA,CAAQ;AAAA,QACV,KAAA;AAAA,QACA,GAAA,EAAK,GAAA;AAAA,QACL,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAA,KAAS;AAEtB,UAAA,IAAI,WAAA,EAAa;AACf,YAAA,OAAO,KAAA;AAAA,UACT;AAGA,UAAA,MAAM,KAAA,GAAQ,IAAA;AACd,UAAA,
IAAA,CACG,MAAM,IAAA,KAAS,cAAA,IAAkB,MAAM,IAAA,KAAS,MAAA,KACjD,MAAM,QAAA,EACN;AACA,YAAA,MAAM,YAAA,GAAe,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AAC1D,YAAA,MAAM,UAAUG,6BAAA,CAAa,OAAA;AAAA,cAC3BA,6BAAA,CAAa,IAAA,CAAK,GAAA,EAAK,YAAY;AAAA,aACrC;AACA,YAAA,MAAM,UAAA,GAAaA,6BAAA,CAAa,OAAA,CAAQ,OAAA,EAAS,MAAM,QAAQ,CAAA;AAC/D,YAAA,IAAI,CAACC,4BAAA,CAAY,GAAA,EAAK,UAAU,CAAA,EAAG;AACjC,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AAIA,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,mBAAA,GACtBN,kCAAA,CAA4B,IAAI,CAAA,GAChC,IAAA;AACJ,UAAA,IAAI,KAAK,OAAA,IAAW,CAAC,aAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1D,YAAA,OAAO,KAAA;AAAA,UACT;AACA,UAAA,IAAI,KAAK,MAAA,EAAQ;AACf,YAAA,MAAM,SAAA,GAAY,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AACvD,YAAA,IAAI;AACF,cAAA,OAAO,KAAK,MAAA,CAAO,SAAA,EAAW,EAAE,IAAA,EAAM,IAAA,CAAK,MAAM,CAAA;AAAA,YACnD,SAAS,KAAA,EAAO;AACd,cAAA,WAAA,GAAc,KAAA;AACd,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AACA,UAAA,OAAO,IAAA;AAAA,QACT;AAAA,OACD;AAAA,KACH;AAEA,IAAA,IAAI,WAAA,EAAa;AAGf,MAAA,IAAI,CAAC,SAAS,SAAA,EAAW;AACvB,QAAA,MAAMI,mBAAA,CAAG,MAAA,CAAO,GAAG,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MACrC;AACA,MAAA,MAAM,WAAA;AAAA,IACR;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
@@ -2,7 +2,7 @@
2
2
 
3
3
  Object.defineProperty(exports, '__esModule', { value: true });
4
4
 
5
- var version = "0.12.1-next.1";
5
+ var version = "0.12.2";
6
6
  var packageinfo = {
7
7
  version: version};
8
8
 
@@ -1,8 +1,8 @@
1
1
  import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
2
2
  import { RootLoggerService, LoggerService } from '@backstage/backend-plugin-api';
3
- import { JsonObject } from '@backstage/types';
3
+ import { JsonPrimitive, JsonObject } from '@backstage/types';
4
4
  import { Format } from 'logform';
5
- import { transport } from 'winston';
5
+ import { transport, config } from 'winston';
6
6
 
7
7
  /**
8
8
  * Root-level logging.
@@ -15,6 +15,20 @@ import { transport } from 'winston';
15
15
  */
16
16
  declare const rootLoggerServiceFactory: _backstage_backend_plugin_api.ServiceFactory<_backstage_backend_plugin_api.RootLoggerService, "root", "singleton">;
17
17
 
18
+ /**
19
+ * @public
20
+ */
21
+ type WinstonLoggerLevelOverrideMatchers = {
22
+ [key: string]: JsonPrimitive | JsonPrimitive[] | undefined;
23
+ };
24
+ /**
25
+ * @public
26
+ */
27
+ type WinstonLoggerLevelOverride = {
28
+ matchers: WinstonLoggerLevelOverrideMatchers;
29
+ level: string;
30
+ };
31
+
18
32
  /**
19
33
  * @public
20
34
  */
@@ -46,6 +60,13 @@ declare class WinstonLogger implements RootLoggerService {
46
60
  * Creates a pretty printed winston log formatter.
47
61
  */
48
62
  static colorFormat(): Format;
63
+ /**
64
+ * Formatter that filters log levels using overrides, falling back to the default level when no criteria match.
65
+ */
66
+ static logLevelFilter(defaultLogLevel: keyof config.NpmConfigSetLevels): {
67
+ format: Format;
68
+ setOverrides: (overrides: WinstonLoggerLevelOverride[]) => void;
69
+ };
49
70
  private constructor();
50
71
  error(message: string, meta?: JsonObject): void;
51
72
  warn(message: string, meta?: JsonObject): void;
@@ -53,6 +74,7 @@ declare class WinstonLogger implements RootLoggerService {
53
74
  debug(message: string, meta?: JsonObject): void;
54
75
  child(meta: JsonObject): LoggerService;
55
76
  addRedactions(redactions: Iterable<string>): void;
77
+ setLevelOverrides(overrides: WinstonLoggerLevelOverride[]): void;
56
78
  }
57
79
 
58
- export { WinstonLogger, type WinstonLoggerOptions, rootLoggerServiceFactory };
80
+ export { WinstonLogger, type WinstonLoggerLevelOverride, type WinstonLoggerLevelOverrideMatchers, type WinstonLoggerOptions, rootLoggerServiceFactory };
@@ -3,6 +3,7 @@ import { RootConfigService, LoggerService, UrlReaderServiceReadTreeResponse, Url
3
3
  import { AzureIntegration, AzureDevOpsCredentialsProvider, BitbucketCloudIntegration, BitbucketIntegration, BitbucketServerIntegration, GerritIntegration, GithubIntegration, GithubCredentialsProvider, GitLabIntegration, GiteaIntegration, HarnessIntegration, AwsS3Integration, AzureCredentialsManager, AzureBlobStorageIntergation } from '@backstage/integration';
4
4
  import { Readable } from 'stream';
5
5
  import { AwsCredentialsManager } from '@backstage/integration-aws-node';
6
+ import { Config } from '@backstage/config';
6
7
 
7
8
  /**
8
9
  * A predicate that decides whether a specific {@link @backstage/backend-plugin-api#UrlReaderService} can handle a
@@ -373,6 +374,7 @@ declare class AzureBlobStorageUrlReader implements UrlReaderService {
373
374
  * @public
374
375
  */
375
376
  declare class FetchUrlReader implements UrlReaderService {
377
+ #private;
376
378
  /**
377
379
  * The factory creates a single reader that will be used for reading any URL that's listed
378
380
  * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing
@@ -386,6 +388,8 @@ declare class FetchUrlReader implements UrlReaderService {
386
388
  * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
387
389
  */
388
390
  static factory: ReaderFactory;
391
+ static fromConfig(config: Config): FetchUrlReader;
392
+ private constructor();
389
393
  read(url: string): Promise<Buffer>;
390
394
  readUrl(url: string, options?: UrlReaderServiceReadUrlOptions): Promise<UrlReaderServiceReadUrlResponse>;
391
395
  readTree(): Promise<UrlReaderServiceReadTreeResponse>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/backend-defaults",
3
- "version": "0.12.1-next.1",
3
+ "version": "0.12.2",
4
4
  "description": "Backend defaults used by Backstage backend apps",
5
5
  "backstage": {
6
6
  "role": "node-library"
@@ -216,19 +216,19 @@
216
216
  "@aws-sdk/credential-providers": "^3.350.0",
217
217
  "@aws-sdk/types": "^3.347.0",
218
218
  "@azure/storage-blob": "^12.5.0",
219
- "@backstage/backend-app-api": "1.2.7-next.0",
220
- "@backstage/backend-dev-utils": "0.1.5",
221
- "@backstage/backend-plugin-api": "1.4.3-next.0",
222
- "@backstage/cli-node": "0.2.14",
223
- "@backstage/config": "1.3.3",
224
- "@backstage/config-loader": "1.10.3-next.0",
225
- "@backstage/errors": "1.2.7",
226
- "@backstage/integration": "1.18.0-next.0",
227
- "@backstage/integration-aws-node": "0.1.17",
228
- "@backstage/plugin-auth-node": "0.6.7-next.1",
229
- "@backstage/plugin-events-node": "0.4.15-next.0",
230
- "@backstage/plugin-permission-node": "0.10.4-next.0",
231
- "@backstage/types": "1.2.1",
219
+ "@backstage/backend-app-api": "^1.2.7",
220
+ "@backstage/backend-dev-utils": "^0.1.5",
221
+ "@backstage/backend-plugin-api": "^1.4.3",
222
+ "@backstage/cli-node": "^0.2.14",
223
+ "@backstage/config": "^1.3.4",
224
+ "@backstage/config-loader": "^1.10.4",
225
+ "@backstage/errors": "^1.2.7",
226
+ "@backstage/integration": "^1.18.0",
227
+ "@backstage/integration-aws-node": "^0.1.17",
228
+ "@backstage/plugin-auth-node": "^0.6.7",
229
+ "@backstage/plugin-events-node": "^0.4.15",
230
+ "@backstage/plugin-permission-node": "^0.10.4",
231
+ "@backstage/types": "^1.2.2",
232
232
  "@google-cloud/storage": "^7.0.0",
233
233
  "@keyv/memcache": "^2.0.1",
234
234
  "@keyv/redis": "^4.0.1",
@@ -284,9 +284,9 @@
284
284
  },
285
285
  "devDependencies": {
286
286
  "@aws-sdk/util-stream-node": "^3.350.0",
287
- "@backstage/backend-plugin-api": "1.4.3-next.0",
288
- "@backstage/backend-test-utils": "1.9.0-next.1",
289
- "@backstage/cli": "0.34.2-next.2",
287
+ "@backstage/backend-plugin-api": "^1.4.3",
288
+ "@backstage/backend-test-utils": "^1.9.0",
289
+ "@backstage/cli": "^0.34.3",
290
290
  "@google-cloud/cloud-sql-connector": "^1.4.0",
291
291
  "@types/archiver": "^6.0.0",
292
292
  "@types/base64-stream": "^1.0.2",