@backstage/backend-defaults 0.13.1 → 0.13.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @backstage/backend-defaults
 
+ ## 0.13.2
+
+ ### Patch Changes
+
+ - Backport security fixes
+
  ## 0.13.1
 
  ### Patch Changes
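For context, the allowlist that the reader code in this diff enforces is configured under `backend.reading.allow`. Below is a minimal sketch of that shape using ConfigReader from @backstage/config; the hosts and paths are illustrative examples, not defaults.

    import { ConfigReader } from '@backstage/config';

    // Illustrative only: these hosts/paths are made-up examples.
    // This mirrors the shape that the reader code in this diff consumes via
    // getOptionalConfigArray('backend.reading.allow').
    const config = new ConfigReader({
      backend: {
        reading: {
          allow: [
            { host: 'example.com' }, // exact hostname match
            { host: '*.examplecdn.com' }, // subdomain wildcard
            { host: 'internal.example.com:8080-8090' }, // optional port or port range
            { host: 'docs.example.com', paths: ['/catalog-info/'] }, // restrict allowed paths
          ],
        },
      },
    });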
@@ -8,6 +8,8 @@ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'defau
 
  var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
 
+ const REDIRECT_STATUS_CODES = [301, 302, 307, 308];
+ const MAX_REDIRECTS = 5;
  const isInRange = (num, [start, end]) => {
  return num >= start && num <= end;
  };
@@ -36,6 +38,26 @@ const parsePortPredicate = (port) => {
  }
  return (url) => !url.port;
  };
+ function predicateFromConfig(config) {
+ const allow = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
+ const paths = allowConfig.getOptionalStringArray("paths");
+ const checkPath = paths ? (url) => {
+ const targetPath = platformPath__default.default.posix.normalize(url.pathname);
+ return paths.some(
+ (allowedPath) => targetPath.startsWith(allowedPath)
+ );
+ } : (_url) => true;
+ const host = allowConfig.getString("host");
+ const [hostname, port] = host.split(":");
+ const checkPort = parsePortPredicate(port);
+ if (hostname.startsWith("*.")) {
+ const suffix = hostname.slice(1);
+ return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
+ }
+ return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
+ });
+ return allow?.length ? (url) => allow.some((p) => p(url)) : () => false;
+ }
  class FetchUrlReader {
  /**
  * The factory creates a single reader that will be used for reading any URL that's listed
@@ -50,64 +72,72 @@ class FetchUrlReader {
  * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
  */
  static factory = ({ config }) => {
- const predicates = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
- const paths = allowConfig.getOptionalStringArray("paths");
- const checkPath = paths ? (url) => {
- const targetPath = platformPath__default.default.posix.normalize(url.pathname);
- return paths.some(
- (allowedPath) => targetPath.startsWith(allowedPath)
- );
- } : (_url) => true;
- const host = allowConfig.getString("host");
- const [hostname, port] = host.split(":");
- const checkPort = parsePortPredicate(port);
- if (hostname.startsWith("*.")) {
- const suffix = hostname.slice(1);
- return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
- }
- return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
- }) ?? [];
- const reader = new FetchUrlReader();
- const predicate = (url) => predicates.some((p) => p(url));
+ const predicate = predicateFromConfig(config);
+ const reader = new FetchUrlReader({ predicate });
  return [{ reader, predicate }];
  };
+ static fromConfig(config) {
+ return new FetchUrlReader({ predicate: predicateFromConfig(config) });
+ }
+ #predicate;
+ constructor(options) {
+ this.#predicate = options.predicate;
+ }
  async read(url) {
  const response = await this.readUrl(url);
  return response.buffer();
  }
  async readUrl(url, options) {
- let response;
- try {
- response = await fetch(url, {
- headers: {
- ...options?.etag && { "If-None-Match": options.etag },
- ...options?.lastModifiedAfter && {
- "If-Modified-Since": options.lastModifiedAfter.toUTCString()
+ let currentUrl = url;
+ for (let redirectCount = 0; redirectCount < MAX_REDIRECTS; redirectCount += 1) {
+ const parsedUrl = new URL(currentUrl);
+ if (!this.#predicate(parsedUrl)) {
+ throw new Error(
+ `URL not allowed by backend.reading.allow configuration: ${currentUrl}`
+ );
+ }
+ let response;
+ try {
+ response = await fetch(currentUrl, {
+ headers: {
+ ...options?.etag && { "If-None-Match": options.etag },
+ ...options?.lastModifiedAfter && {
+ "If-Modified-Since": options.lastModifiedAfter.toUTCString()
+ },
+ ...options?.token && { Authorization: `Bearer ${options.token}` }
  },
- ...options?.token && { Authorization: `Bearer ${options.token}` }
- },
- // TODO(freben): The signal cast is there because pre-3.x versions of
- // node-fetch have a very slightly deviating AbortSignal type signature.
- // The difference does not affect us in practice however. The cast can
- // be removed after we support ESM for CLI dependencies and migrate to
- // version 3 of node-fetch.
- // https://github.com/backstage/backstage/issues/8242
- signal: options?.signal
- });
- } catch (e) {
- throw new Error(`Unable to read ${url}, ${e}`);
- }
- if (response.status === 304) {
- throw new errors.NotModifiedError();
- }
- if (response.ok) {
- return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
- }
- const message = `could not read ${url}, ${response.status} ${response.statusText}`;
- if (response.status === 404) {
- throw new errors.NotFoundError(message);
+ // Handle redirects manually to validate targets against the allowlist
+ redirect: "manual",
+ // TODO(freben): The signal cast is there because pre-3.x versions of
+ // node-fetch have a very slightly deviating AbortSignal type signature.
+ // The difference does not affect us in practice however. The cast can
+ // be removed after we support ESM for CLI dependencies and migrate to
+ // version 3 of node-fetch.
+ // https://github.com/backstage/backstage/issues/8242
+ signal: options?.signal
+ });
+ } catch (e) {
+ throw new Error(`Unable to read ${currentUrl}, ${e}`);
+ }
+ if (response.ok) {
+ return ReadUrlResponseFactory.ReadUrlResponseFactory.fromResponse(response);
+ }
+ if (response.status === 304) {
+ throw new errors.NotModifiedError();
+ }
+ const location = response.headers.get("location");
+ if (!REDIRECT_STATUS_CODES.includes(response.status) || !location) {
+ const message = `could not read ${currentUrl}, ${response.status} ${response.statusText}`;
+ if (response.status === 404) {
+ throw new errors.NotFoundError(message);
+ }
+ throw new Error(message);
+ }
+ currentUrl = new URL(location, currentUrl).toString();
  }
- throw new Error(message);
+ throw new Error(
+ `Too many redirects (max ${MAX_REDIRECTS}) when reading ${url}`
+ );
  }
  async readTree() {
  throw new Error("FetchUrlReader does not implement readTree");
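The readUrl change above switches to manual redirect handling: each hop is re-validated against the allowlist predicate before it is fetched, and at most MAX_REDIRECTS hops are followed, so an allowed host can no longer redirect the reader to a disallowed one. Below is a minimal standalone sketch of that pattern, assuming Node 18+ global fetch; fetchWithCheckedRedirects and isAllowed are illustrative names, not part of the package.

    // Sketch of manual redirect handling with per-hop validation.
    // `isAllowed` stands in for the predicate built from backend.reading.allow.
    const REDIRECTS = [301, 302, 307, 308];

    async function fetchWithCheckedRedirects(
      url: string,
      isAllowed: (url: URL) => boolean,
      maxRedirects = 5,
    ): Promise<Response> {
      let current = url;
      for (let hop = 0; hop < maxRedirects; hop += 1) {
        if (!isAllowed(new URL(current))) {
          throw new Error(`URL not allowed: ${current}`);
        }
        // redirect: 'manual' surfaces 3xx responses instead of following them
        const response = await fetch(current, { redirect: 'manual' });
        const location = response.headers.get('location');
        if (!REDIRECTS.includes(response.status) || !location) {
          return response; // not a redirect; caller inspects status/body
        }
        // Resolve relative Location headers against the current URL
        current = new URL(location, current).toString();
      }
      throw new Error(`Too many redirects (max ${maxRedirects}) when reading ${url}`);
    }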
@@ -1 +1 @@
- {"version":3,"file":"FetchUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/FetchUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n assertError,\n NotFoundError,\n NotModifiedError,\n} from '@backstage/errors';\nimport { ReaderFactory } from './types';\nimport path from 'path';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\n\nconst isInRange = (num: number, [start, end]: [number, number]) => {\n return num >= start && num <= end;\n};\n\nconst parsePortRange = (port: string): [number, number] => {\n const isRange = port.includes('-');\n if (isRange) {\n const range = port\n .split('-')\n .map(v => parseInt(v, 10))\n .filter(Boolean) as [number, number];\n if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`);\n const [start, end] = range;\n if (start <= 0 || end <= 0 || start > end)\n throw new Error(`Port range is not valid: [${start}, ${end}]`);\n return range;\n }\n const parsedPort = parseInt(port, 10);\n return [parsedPort, parsedPort];\n};\n\nconst parsePortPredicate = (port: string | undefined) => {\n if (port) {\n const range = parsePortRange(port);\n return (url: URL) => {\n if (url.port) return isInRange(parseInt(url.port, 10), range);\n\n if (url.protocol === 'http:') return isInRange(80, range);\n if (url.protocol === 'https:') return isInRange(443, range);\n return false;\n };\n }\n return (url: URL) => !url.port;\n};\n\n/**\n * A {@link @backstage/backend-plugin-api#UrlReaderService} that does a plain fetch of the URL.\n *\n * @public\n */\nexport class FetchUrlReader implements UrlReaderService {\n /**\n * The factory creates a single reader that will be used for reading any URL that's listed\n * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing\n * targets to allow, containing the following fields:\n *\n * `host`:\n * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.\n * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.\n *\n * `paths`:\n * An optional list of paths which are allowed. If the list is omitted all paths are allowed.\n */\n static factory: ReaderFactory = ({ config }) => {\n const predicates =\n config\n .getOptionalConfigArray('backend.reading.allow')\n ?.map(allowConfig => {\n const paths = allowConfig.getOptionalStringArray('paths');\n const checkPath = paths\n ? 
(url: URL) => {\n const targetPath = path.posix.normalize(url.pathname);\n return paths.some(allowedPath =>\n targetPath.startsWith(allowedPath),\n );\n }\n : (_url: URL) => true;\n const host = allowConfig.getString('host');\n const [hostname, port] = host.split(':');\n\n const checkPort = parsePortPredicate(port);\n\n if (hostname.startsWith('*.')) {\n const suffix = hostname.slice(1);\n return (url: URL) =>\n url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);\n }\n return (url: URL) =>\n url.hostname === hostname && checkPath(url) && checkPort(url);\n }) ?? [];\n\n const reader = new FetchUrlReader();\n const predicate = (url: URL) => predicates.some(p => p(url));\n return [{ reader, predicate }];\n };\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n let response: Response;\n try {\n response = await fetch(url, {\n headers: {\n ...(options?.etag && { 'If-None-Match': options.etag }),\n ...(options?.lastModifiedAfter && {\n 'If-Modified-Since': options.lastModifiedAfter.toUTCString(),\n }),\n ...(options?.token && { Authorization: `Bearer ${options.token}` }),\n },\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n signal: options?.signal as any,\n });\n } catch (e) {\n throw new Error(`Unable to read ${url}, ${e}`);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n const message = `could not read ${url}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n async readTree(): Promise<UrlReaderServiceReadTreeResponse> {\n throw new Error('FetchUrlReader does not implement readTree');\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { pathname } = new URL(url);\n\n if (pathname.match(/[*?]/)) {\n throw new Error('Unsupported search pattern URL');\n }\n\n try {\n const data = await this.readUrl(url, options);\n\n return {\n files: [\n {\n url: url,\n content: data.buffer,\n lastModifiedAt: data.lastModifiedAt,\n },\n ],\n etag: data.etag ?? 
'',\n };\n } catch (error) {\n assertError(error);\n if (error.name === 'NotFoundError') {\n return {\n files: [],\n etag: '',\n };\n }\n throw error;\n }\n }\n\n toString() {\n return 'fetch{}';\n }\n}\n"],"names":["path","NotModifiedError","ReadUrlResponseFactory","NotFoundError","assertError"],"mappings":";;;;;;;;;;AAiCA,MAAM,YAAY,CAAC,GAAA,EAAa,CAAC,KAAA,EAAO,GAAG,CAAA,KAAwB;AACjE,EAAA,OAAO,GAAA,IAAO,SAAS,GAAA,IAAO,GAAA;AAChC,CAAA;AAEA,MAAM,cAAA,GAAiB,CAAC,IAAA,KAAmC;AACzD,EAAA,MAAM,OAAA,GAAU,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA;AACjC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,KAAA,GAAQ,IAAA,CACX,KAAA,CAAM,GAAG,CAAA,CACT,GAAA,CAAI,CAAA,CAAA,KAAK,QAAA,CAAS,CAAA,EAAG,EAAE,CAAC,CAAA,CACxB,OAAO,OAAO,CAAA;AACjB,IAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG,MAAM,IAAI,KAAA,CAAM,CAAA,yBAAA,EAA4B,IAAI,CAAA,CAAE,CAAA;AAC1E,IAAA,MAAM,CAAC,KAAA,EAAO,GAAG,CAAA,GAAI,KAAA;AACrB,IAAA,IAAI,KAAA,IAAS,CAAA,IAAK,GAAA,IAAO,CAAA,IAAK,KAAA,GAAQ,GAAA;AACpC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,KAAK,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,CAAG,CAAA;AAC/D,IAAA,OAAO,KAAA;AAAA,EACT;AACA,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,IAAA,EAAM,EAAE,CAAA;AACpC,EAAA,OAAO,CAAC,YAAY,UAAU,CAAA;AAChC,CAAA;AAEA,MAAM,kBAAA,GAAqB,CAAC,IAAA,KAA6B;AACvD,EAAA,IAAI,IAAA,EAAM;AACR,IAAA,MAAM,KAAA,GAAQ,eAAe,IAAI,CAAA;AACjC,IAAA,OAAO,CAAC,GAAA,KAAa;AACnB,MAAA,IAAI,GAAA,CAAI,MAAM,OAAO,SAAA,CAAU,SAAS,GAAA,CAAI,IAAA,EAAM,EAAE,CAAA,EAAG,KAAK,CAAA;AAE5D,MAAA,IAAI,IAAI,QAAA,KAAa,OAAA,EAAS,OAAO,SAAA,CAAU,IAAI,KAAK,CAAA;AACxD,MAAA,IAAI,IAAI,QAAA,KAAa,QAAA,EAAU,OAAO,SAAA,CAAU,KAAK,KAAK,CAAA;AAC1D,MAAA,OAAO,KAAA;AAAA,IACT,CAAA;AAAA,EACF;AACA,EAAA,OAAO,CAAC,GAAA,KAAa,CAAC,GAAA,CAAI,IAAA;AAC5B,CAAA;AAOO,MAAM,cAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAatD,OAAO,OAAA,GAAyB,CAAC,EAAE,QAAO,KAAM;AAC9C,IAAA,MAAM,aACJ,MAAA,CACG,sBAAA,CAAuB,uBAAuB,CAAA,EAC7C,IAAI,CAAA,WAAA,KAAe;AACnB,MAAA,MAAM,KAAA,GAAQ,WAAA,CAAY,sBAAA,CAAuB,OAAO,CAAA;AACxD,MAAA,MAAM,SAAA,GAAY,KAAA,GACd,CAAC,GAAA,KAAa;AACZ,QAAA,MAAM,UAAA,GAAaA,6BAAA,CAAK,KAAA,CAAM,SAAA,CAAU,IAAI,QAAQ,CAAA;AACpD,QAAA,OAAO,KAAA,CAAM,IAAA;AAAA,UAAK,CAAA,WAAA,KAChB,UAAA,CAAW,UAAA,CAAW,WAAW;AAAA,SACnC;AAAA,MACF,CAAA,GACA,CAAC,IAAA,KAAc,IAAA;AACnB,MAAA,MAAM,IAAA,GAAO,WAAA,CAAY,SAAA,CAAU,MAAM,CAAA;AACzC,MAAA,MAAM,CAAC,QAAA,EAAU,IAAI,CAAA,GAAI,IAAA,CAAK,MAAM,GAAG,CAAA;AAEvC,MAAA,MAAM,SAAA,GAAY,mBAAmB,IAAI,CAAA;AAEzC,MAAA,IAAI,QAAA,CAAS,UAAA,CAAW,IAAI,CAAA,EAAG;AAC7B,QAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA;AAC/B,QAAA,OAAO,CAAC,GAAA,KACN,GAAA,CAAI,QAAA,CAAS,QAAA,CAAS,MAAM,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,MACpE;AACA,MAAA,OAAO,CAAC,QACN,GAAA,CAAI,QAAA,KAAa,YAAY,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,IAChE,CAAC,KAAK,EAAC;AAEX,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAClC,IAAA,MAAM,SAAA,GAAY,CAAC,GAAA,KAAa,UAAA,CAAW,KAAK,CAAA,CAAA,KAAK,CAAA,CAAE,GAAG,CAAC,CAAA;AAC3D,IAAA,OAAO,CAAC,EAAE,MAAA,EAAQ,SAAA,EAAW,CAAA;AAAA,EAC/B,CAAA;AAAA,EAEA,MAAM,KAAK,GAAA,EAA8B;AACvC,IAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAA,EAAO;AAAA,EACzB;AAAA,EAEA,MAAM,OAAA,CACJ,GAAA,EACA,OAAA,EAC0C;AAC1C,IAAA,IAAI,QAAA;AACJ,IAAA,IAAI;AACF,MAAA,QAAA,GAAW,MAAM,MAAM,GAAA,EAAK;AAAA,QAC1B,OAAA,EAAS;AAAA,UACP,GAAI,OAAA,EAAS,IAAA,IAAQ,EAAE,eAAA,EAAiB,QAAQ,IAAA,EAAK;AAAA,UACrD,GAAI,SAAS,iBAAA,IAAqB;AAAA,YAChC,mBAAA,EAAqB,OAAA,CAAQ,iBAAA,CAAkB,WAAA;AAAY,WAC7D;AAAA,UACA,GAAI,SAAS,KAAA,IAAS,EAAE,eAAe,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAA;AAAG,SACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOA,QAAQ,OAAA,EAAS;AAAA,OAClB,CAAA;AAAA,IACH,SAAS,CAAA,EAAG;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,eAAA,EAAkB,GAAG
,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AAAA,IAC/C;AAEA,IAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,MAAA,MAAM,IAAIC,uBAAA,EAAiB;AAAA,IAC7B;AAEA,IAAA,IAAI,SAAS,EAAA,EAAI;AACf,MAAA,OAAOC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA,IACrD;AAEA,IAAA,MAAM,OAAA,GAAU,kBAAkB,GAAG,CAAA,EAAA,EAAK,SAAS,MAAM,CAAA,CAAA,EAAI,SAAS,UAAU,CAAA,CAAA;AAChF,IAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,MAAA,MAAM,IAAIC,qBAAc,OAAO,CAAA;AAAA,IACjC;AACA,IAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,EACzB;AAAA,EAEA,MAAM,QAAA,GAAsD;AAC1D,IAAA,MAAM,IAAI,MAAM,4CAA4C,CAAA;AAAA,EAC9D;AAAA,EAEA,MAAM,MAAA,CACJ,GAAA,EACA,OAAA,EACyC;AACzC,IAAA,MAAM,EAAE,QAAA,EAAS,GAAI,IAAI,IAAI,GAAG,CAAA;AAEhC,IAAA,IAAI,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA,EAAG;AAC1B,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAA,CAAK,OAAA,CAAQ,KAAK,OAAO,CAAA;AAE5C,MAAA,OAAO;AAAA,QACL,KAAA,EAAO;AAAA,UACL;AAAA,YACE,GAAA;AAAA,YACA,SAAS,IAAA,CAAK,MAAA;AAAA,YACd,gBAAgB,IAAA,CAAK;AAAA;AACvB,SACF;AAAA,QACA,IAAA,EAAM,KAAK,IAAA,IAAQ;AAAA,OACrB;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAAC,kBAAA,CAAY,KAAK,CAAA;AACjB,MAAA,IAAI,KAAA,CAAM,SAAS,eAAA,EAAiB;AAClC,QAAA,OAAO;AAAA,UACL,OAAO,EAAC;AAAA,UACR,IAAA,EAAM;AAAA,SACR;AAAA,MACF;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,QAAA,GAAW;AACT,IAAA,OAAO,SAAA;AAAA,EACT;AACF;;;;"}
+ {"version":3,"file":"FetchUrlReader.cjs.js","sources":["../../../../src/entrypoints/urlReader/lib/FetchUrlReader.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderService,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadUrlOptions,\n UrlReaderServiceReadUrlResponse,\n UrlReaderServiceSearchOptions,\n UrlReaderServiceSearchResponse,\n} from '@backstage/backend-plugin-api';\nimport {\n assertError,\n NotFoundError,\n NotModifiedError,\n} from '@backstage/errors';\nimport { ReaderFactory } from './types';\nimport path from 'path';\nimport { ReadUrlResponseFactory } from './ReadUrlResponseFactory';\nimport { Config } from '@backstage/config';\n\nconst REDIRECT_STATUS_CODES = [301, 302, 307, 308];\nconst MAX_REDIRECTS = 5;\n\nconst isInRange = (num: number, [start, end]: [number, number]) => {\n return num >= start && num <= end;\n};\n\nconst parsePortRange = (port: string): [number, number] => {\n const isRange = port.includes('-');\n if (isRange) {\n const range = port\n .split('-')\n .map(v => parseInt(v, 10))\n .filter(Boolean) as [number, number];\n if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`);\n const [start, end] = range;\n if (start <= 0 || end <= 0 || start > end)\n throw new Error(`Port range is not valid: [${start}, ${end}]`);\n return range;\n }\n const parsedPort = parseInt(port, 10);\n return [parsedPort, parsedPort];\n};\n\nconst parsePortPredicate = (port: string | undefined) => {\n if (port) {\n const range = parsePortRange(port);\n return (url: URL) => {\n if (url.port) return isInRange(parseInt(url.port, 10), range);\n\n if (url.protocol === 'http:') return isInRange(80, range);\n if (url.protocol === 'https:') return isInRange(443, range);\n return false;\n };\n }\n return (url: URL) => !url.port;\n};\n\nfunction predicateFromConfig(config: Config): (url: URL) => boolean {\n const allow = config\n .getOptionalConfigArray('backend.reading.allow')\n ?.map(allowConfig => {\n const paths = allowConfig.getOptionalStringArray('paths');\n const checkPath = paths\n ? (url: URL) => {\n const targetPath = path.posix.normalize(url.pathname);\n return paths.some(allowedPath =>\n targetPath.startsWith(allowedPath),\n );\n }\n : (_url: URL) => true;\n const host = allowConfig.getString('host');\n const [hostname, port] = host.split(':');\n\n const checkPort = parsePortPredicate(port);\n\n if (hostname.startsWith('*.')) {\n const suffix = hostname.slice(1);\n return (url: URL) =>\n url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);\n }\n\n return (url: URL) =>\n url.hostname === hostname && checkPath(url) && checkPort(url);\n });\n\n return allow?.length ? 
url => allow.some(p => p(url)) : () => false;\n}\n\n/**\n * A {@link @backstage/backend-plugin-api#UrlReaderService} that does a plain fetch of the URL.\n *\n * @public\n */\nexport class FetchUrlReader implements UrlReaderService {\n /**\n * The factory creates a single reader that will be used for reading any URL that's listed\n * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing\n * targets to allow, containing the following fields:\n *\n * `host`:\n * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.\n * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.\n *\n * `paths`:\n * An optional list of paths which are allowed. If the list is omitted all paths are allowed.\n */\n static factory: ReaderFactory = ({ config }) => {\n const predicate = predicateFromConfig(config);\n const reader = new FetchUrlReader({ predicate });\n return [{ reader, predicate }];\n };\n\n static fromConfig(config: Config): FetchUrlReader {\n return new FetchUrlReader({ predicate: predicateFromConfig(config) });\n }\n\n readonly #predicate: (url: URL) => boolean;\n\n private constructor(options: { predicate: (url: URL) => boolean }) {\n this.#predicate = options.predicate;\n }\n\n async read(url: string): Promise<Buffer> {\n const response = await this.readUrl(url);\n return response.buffer();\n }\n\n async readUrl(\n url: string,\n options?: UrlReaderServiceReadUrlOptions,\n ): Promise<UrlReaderServiceReadUrlResponse> {\n let currentUrl = url;\n\n for (\n let redirectCount = 0;\n redirectCount < MAX_REDIRECTS;\n redirectCount += 1\n ) {\n // Validate URL against predicate if configured\n const parsedUrl = new URL(currentUrl);\n if (!this.#predicate(parsedUrl)) {\n throw new Error(\n `URL not allowed by backend.reading.allow configuration: ${currentUrl}`,\n );\n }\n\n let response: Response;\n try {\n response = await fetch(currentUrl, {\n headers: {\n ...(options?.etag && { 'If-None-Match': options.etag }),\n ...(options?.lastModifiedAfter && {\n 'If-Modified-Since': options.lastModifiedAfter.toUTCString(),\n }),\n ...(options?.token && { Authorization: `Bearer ${options.token}` }),\n },\n // Handle redirects manually to validate targets against the allowlist\n redirect: 'manual',\n // TODO(freben): The signal cast is there because pre-3.x versions of\n // node-fetch have a very slightly deviating AbortSignal type signature.\n // The difference does not affect us in practice however. 
The cast can\n // be removed after we support ESM for CLI dependencies and migrate to\n // version 3 of node-fetch.\n // https://github.com/backstage/backstage/issues/8242\n signal: options?.signal as any,\n });\n } catch (e) {\n throw new Error(`Unable to read ${currentUrl}, ${e}`);\n }\n\n if (response.ok) {\n return ReadUrlResponseFactory.fromResponse(response);\n }\n\n if (response.status === 304) {\n throw new NotModifiedError();\n }\n\n const location = response.headers.get('location');\n if (!REDIRECT_STATUS_CODES.includes(response.status) || !location) {\n const message = `could not read ${currentUrl}, ${response.status} ${response.statusText}`;\n if (response.status === 404) {\n throw new NotFoundError(message);\n }\n throw new Error(message);\n }\n\n // Follow the redirect\n currentUrl = new URL(location, currentUrl).toString();\n }\n\n throw new Error(\n `Too many redirects (max ${MAX_REDIRECTS}) when reading ${url}`,\n );\n }\n\n async readTree(): Promise<UrlReaderServiceReadTreeResponse> {\n throw new Error('FetchUrlReader does not implement readTree');\n }\n\n async search(\n url: string,\n options?: UrlReaderServiceSearchOptions,\n ): Promise<UrlReaderServiceSearchResponse> {\n const { pathname } = new URL(url);\n\n if (pathname.match(/[*?]/)) {\n throw new Error('Unsupported search pattern URL');\n }\n\n try {\n const data = await this.readUrl(url, options);\n\n return {\n files: [\n {\n url: url,\n content: data.buffer,\n lastModifiedAt: data.lastModifiedAt,\n },\n ],\n etag: data.etag ?? '',\n };\n } catch (error) {\n assertError(error);\n if (error.name === 'NotFoundError') {\n return {\n files: [],\n etag: '',\n };\n }\n throw error;\n }\n }\n\n toString() {\n return 'fetch{}';\n }\n}\n"],"names":["path","ReadUrlResponseFactory","NotModifiedError","NotFoundError","assertError"],"mappings":";;;;;;;;;;AAkCA,MAAM,qBAAA,GAAwB,CAAC,GAAA,EAAK,GAAA,EAAK,KAAK,GAAG,CAAA;AACjD,MAAM,aAAA,GAAgB,CAAA;AAEtB,MAAM,YAAY,CAAC,GAAA,EAAa,CAAC,KAAA,EAAO,GAAG,CAAA,KAAwB;AACjE,EAAA,OAAO,GAAA,IAAO,SAAS,GAAA,IAAO,GAAA;AAChC,CAAA;AAEA,MAAM,cAAA,GAAiB,CAAC,IAAA,KAAmC;AACzD,EAAA,MAAM,OAAA,GAAU,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA;AACjC,EAAA,IAAI,OAAA,EAAS;AACX,IAAA,MAAM,KAAA,GAAQ,IAAA,CACX,KAAA,CAAM,GAAG,CAAA,CACT,GAAA,CAAI,CAAA,CAAA,KAAK,QAAA,CAAS,CAAA,EAAG,EAAE,CAAC,CAAA,CACxB,OAAO,OAAO,CAAA;AACjB,IAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG,MAAM,IAAI,KAAA,CAAM,CAAA,yBAAA,EAA4B,IAAI,CAAA,CAAE,CAAA;AAC1E,IAAA,MAAM,CAAC,KAAA,EAAO,GAAG,CAAA,GAAI,KAAA;AACrB,IAAA,IAAI,KAAA,IAAS,CAAA,IAAK,GAAA,IAAO,CAAA,IAAK,KAAA,GAAQ,GAAA;AACpC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,0BAAA,EAA6B,KAAK,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,CAAG,CAAA;AAC/D,IAAA,OAAO,KAAA;AAAA,EACT;AACA,EAAA,MAAM,UAAA,GAAa,QAAA,CAAS,IAAA,EAAM,EAAE,CAAA;AACpC,EAAA,OAAO,CAAC,YAAY,UAAU,CAAA;AAChC,CAAA;AAEA,MAAM,kBAAA,GAAqB,CAAC,IAAA,KAA6B;AACvD,EAAA,IAAI,IAAA,EAAM;AACR,IAAA,MAAM,KAAA,GAAQ,eAAe,IAAI,CAAA;AACjC,IAAA,OAAO,CAAC,GAAA,KAAa;AACnB,MAAA,IAAI,GAAA,CAAI,MAAM,OAAO,SAAA,CAAU,SAAS,GAAA,CAAI,IAAA,EAAM,EAAE,CAAA,EAAG,KAAK,CAAA;AAE5D,MAAA,IAAI,IAAI,QAAA,KAAa,OAAA,EAAS,OAAO,SAAA,CAAU,IAAI,KAAK,CAAA;AACxD,MAAA,IAAI,IAAI,QAAA,KAAa,QAAA,EAAU,OAAO,SAAA,CAAU,KAAK,KAAK,CAAA;AAC1D,MAAA,OAAO,KAAA;AAAA,IACT,CAAA;AAAA,EACF;AACA,EAAA,OAAO,CAAC,GAAA,KAAa,CAAC,GAAA,CAAI,IAAA;AAC5B,CAAA;AAEA,SAAS,oBAAoB,MAAA,EAAuC;AAClE,EAAA,MAAM,QAAQ,MAAA,CACX,sBAAA,CAAuB,uBAAuB,CAAA,EAC7C,IAAI,CAAA,WAAA,KAAe;AACnB,IAAA,MAAM,KAAA,GAAQ,WAAA,CAAY,sBAAA,CAAuB,OAAO,CAAA;AACxD,IAAA,MAAM,SAAA,GAAY,KAAA,GACd,CAAC,GAAA,KAAa;AACZ,MAAA,MAAM,UAAA,GAAaA,6BAAA,CAAK,KAAA,CAAM,SAAA,CAAU,IAAI,QAAQ,CAAA;AACpD,MAAA,OAAO,KAA
A,CAAM,IAAA;AAAA,QAAK,CAAA,WAAA,KAChB,UAAA,CAAW,UAAA,CAAW,WAAW;AAAA,OACnC;AAAA,IACF,CAAA,GACA,CAAC,IAAA,KAAc,IAAA;AACnB,IAAA,MAAM,IAAA,GAAO,WAAA,CAAY,SAAA,CAAU,MAAM,CAAA;AACzC,IAAA,MAAM,CAAC,QAAA,EAAU,IAAI,CAAA,GAAI,IAAA,CAAK,MAAM,GAAG,CAAA;AAEvC,IAAA,MAAM,SAAA,GAAY,mBAAmB,IAAI,CAAA;AAEzC,IAAA,IAAI,QAAA,CAAS,UAAA,CAAW,IAAI,CAAA,EAAG;AAC7B,MAAA,MAAM,MAAA,GAAS,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA;AAC/B,MAAA,OAAO,CAAC,GAAA,KACN,GAAA,CAAI,QAAA,CAAS,QAAA,CAAS,MAAM,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,IACpE;AAEA,IAAA,OAAO,CAAC,QACN,GAAA,CAAI,QAAA,KAAa,YAAY,SAAA,CAAU,GAAG,CAAA,IAAK,SAAA,CAAU,GAAG,CAAA;AAAA,EAChE,CAAC,CAAA;AAEH,EAAA,OAAO,KAAA,EAAO,MAAA,GAAS,CAAA,GAAA,KAAO,KAAA,CAAM,IAAA,CAAK,OAAK,CAAA,CAAE,GAAG,CAAC,CAAA,GAAI,MAAM,KAAA;AAChE;AAOO,MAAM,cAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAatD,OAAO,OAAA,GAAyB,CAAC,EAAE,QAAO,KAAM;AAC9C,IAAA,MAAM,SAAA,GAAY,oBAAoB,MAAM,CAAA;AAC5C,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,CAAe,EAAE,WAAW,CAAA;AAC/C,IAAA,OAAO,CAAC,EAAE,MAAA,EAAQ,SAAA,EAAW,CAAA;AAAA,EAC/B,CAAA;AAAA,EAEA,OAAO,WAAW,MAAA,EAAgC;AAChD,IAAA,OAAO,IAAI,cAAA,CAAe,EAAE,WAAW,mBAAA,CAAoB,MAAM,GAAG,CAAA;AAAA,EACtE;AAAA,EAES,UAAA;AAAA,EAED,YAAY,OAAA,EAA+C;AACjE,IAAA,IAAA,CAAK,aAAa,OAAA,CAAQ,SAAA;AAAA,EAC5B;AAAA,EAEA,MAAM,KAAK,GAAA,EAA8B;AACvC,IAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA;AACvC,IAAA,OAAO,SAAS,MAAA,EAAO;AAAA,EACzB;AAAA,EAEA,MAAM,OAAA,CACJ,GAAA,EACA,OAAA,EAC0C;AAC1C,IAAA,IAAI,UAAA,GAAa,GAAA;AAEjB,IAAA,KAAA,IACM,aAAA,GAAgB,CAAA,EACpB,aAAA,GAAgB,aAAA,EAChB,iBAAiB,CAAA,EACjB;AAEA,MAAA,MAAM,SAAA,GAAY,IAAI,GAAA,CAAI,UAAU,CAAA;AACpC,MAAA,IAAI,CAAC,IAAA,CAAK,UAAA,CAAW,SAAS,CAAA,EAAG;AAC/B,QAAA,MAAM,IAAI,KAAA;AAAA,UACR,2DAA2D,UAAU,CAAA;AAAA,SACvE;AAAA,MACF;AAEA,MAAA,IAAI,QAAA;AACJ,MAAA,IAAI;AACF,QAAA,QAAA,GAAW,MAAM,MAAM,UAAA,EAAY;AAAA,UACjC,OAAA,EAAS;AAAA,YACP,GAAI,OAAA,EAAS,IAAA,IAAQ,EAAE,eAAA,EAAiB,QAAQ,IAAA,EAAK;AAAA,YACrD,GAAI,SAAS,iBAAA,IAAqB;AAAA,cAChC,mBAAA,EAAqB,OAAA,CAAQ,iBAAA,CAAkB,WAAA;AAAY,aAC7D;AAAA,YACA,GAAI,SAAS,KAAA,IAAS,EAAE,eAAe,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAA;AAAG,WACnE;AAAA;AAAA,UAEA,QAAA,EAAU,QAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAOV,QAAQ,OAAA,EAAS;AAAA,SAClB,CAAA;AAAA,MACH,SAAS,CAAA,EAAG;AACV,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,eAAA,EAAkB,UAAU,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AAAA,MACtD;AAEA,MAAA,IAAI,SAAS,EAAA,EAAI;AACf,QAAA,OAAOC,6CAAA,CAAuB,aAAa,QAAQ,CAAA;AAAA,MACrD;AAEA,MAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,QAAA,MAAM,IAAIC,uBAAA,EAAiB;AAAA,MAC7B;AAEA,MAAA,MAAM,QAAA,GAAW,QAAA,CAAS,OAAA,CAAQ,GAAA,CAAI,UAAU,CAAA;AAChD,MAAA,IAAI,CAAC,qBAAA,CAAsB,QAAA,CAAS,SAAS,MAAM,CAAA,IAAK,CAAC,QAAA,EAAU;AACjE,QAAA,MAAM,OAAA,GAAU,kBAAkB,UAAU,CAAA,EAAA,EAAK,SAAS,MAAM,CAAA,CAAA,EAAI,SAAS,UAAU,CAAA,CAAA;AACvF,QAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAC3B,UAAA,MAAM,IAAIC,qBAAc,OAAO,CAAA;AAAA,QACjC;AACA,QAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,MACzB;AAGA,MAAA,UAAA,GAAa,IAAI,GAAA,CAAI,QAAA,EAAU,UAAU,EAAE,QAAA,EAAS;AAAA,IACtD;AAEA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,wBAAA,EAA2B,aAAa,CAAA,eAAA,EAAkB,GAAG,CAAA;AAAA,KAC/D;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAAsD;AAC1D,IAAA,MAAM,IAAI,MAAM,4CAA4C,CAAA;AAAA,EAC9D;AAAA,EAEA,MAAM,MAAA,CACJ,GAAA,EACA,OAAA,EACyC;AACzC,IAAA,MAAM,EAAE,QAAA,EAAS,GAAI,IAAI,IAAI,GAAG,CAAA;AAEhC,IAAA,IAAI,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA,EAAG;AAC1B,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAA,CAAK,OAAA,CAAQ,KAAK,OAAO,CAAA;AAE5C,MAAA,OAAO;AAAA,QACL,KAAA,EAAO;AAAA,UACL;AAAA,YACE,GAAA;AAAA,YACA,SAAS,IAAA,CAAK,MAAA;AAAA,YACd,gBAAgB,IAAA,CAAK;AAAA;AACvB,SACF;AAAA,QACA,IAAA,EAAM,KAAK,IAAA,IAAQ;AA
AA,OACrB;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAAC,kBAAA,CAAY,KAAK,CAAA;AACjB,MAAA,IAAI,KAAA,CAAM,SAAS,eAAA,EAAiB;AAClC,QAAA,OAAO;AAAA,UACL,OAAO,EAAC;AAAA,UACR,IAAA,EAAM;AAAA,SACR;AAAA,MACF;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AAAA,EAEA,QAAA,GAAW;AACT,IAAA,OAAO,SAAA;AAAA,EACT;AACF;;;;"}
@@ -1,5 +1,6 @@
  'use strict';
 
+ var backendPluginApi = require('@backstage/backend-plugin-api');
  var concatStream = require('concat-stream');
  var platformPath = require('path');
  var getRawBody = require('raw-body');
@@ -67,7 +68,7 @@ class ReadableArrayResponse {
  const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
  for (let i = 0; i < this.stream.length; i++) {
  if (!this.stream[i].path.endsWith("/")) {
- const filePath = platformPath__default.default.join(dir, this.stream[i].path);
+ const filePath = backendPluginApi.resolveSafeChildPath(dir, this.stream[i].path);
  await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
  await pipeline(this.stream[i].data, fs__default.default.createWriteStream(filePath));
  }
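The change above replaces path.join with resolveSafeChildPath when writing extracted files, so an archive entry whose path resolves outside the target directory is refused rather than written there. A small illustrative sketch of the difference; the paths are made-up examples.

    import platformPath from 'path';
    import { resolveSafeChildPath } from '@backstage/backend-plugin-api';

    const dir = '/tmp/backstage-extract';
    const entryPath = '../../etc/cron.d/evil'; // hypothetical hostile archive entry

    // A plain join happily resolves outside the extraction directory:
    platformPath.join(dir, entryPath); // => '/etc/cron.d/evil'

    // resolveSafeChildPath only returns paths contained in `dir` and rejects
    // ones that would escape it.
    const safe = resolveSafeChildPath(dir, 'docs/index.md'); // ok, stays inside dir
    // resolveSafeChildPath(dir, entryPath);                 // would be rejected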
@@ -1 +1 @@
- {"version":3,"file":"ReadableArrayResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/ReadableArrayResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport platformPath, { dirname } from 'path';\nimport getRawBody from 'raw-body';\nimport fs from 'fs-extra';\nimport { promisify } from 'util';\nimport tar from 'tar';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport { FromReadableArrayOptions } from '../types';\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a array of Readable objects into a tree response reader.\n */\nexport class ReadableArrayResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n private readonly stream: FromReadableArrayOptions;\n private readonly workDir: string;\n public readonly etag: string;\n\n constructor(stream: FromReadableArrayOptions, workDir: string, etag: string) {\n this.stream = stream;\n this.workDir = workDir;\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n files.push({\n path: this.stream[i].path,\n content: () => getRawBody(this.stream[i].data),\n lastModifiedAt: this.stream[i]?.lastModifiedAt,\n });\n }\n }\n\n return files;\n }\n\n async archive(): Promise<NodeJS.ReadableStream> {\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n const filePath = platformPath.join(dir, this.stream[i].path);\n await fs.mkdir(dirname(filePath), { recursive: true });\n await pipeline(this.stream[i].data, fs.createWriteStream(filePath));\n }\n }\n\n return dir;\n 
}\n}\n"],"names":["promisify","pipelineCb","getRawBody","tar","concatStream","Readable","fs","platformPath","dirname"],"mappings":";;;;;;;;;;;;;;;;;;AA8BA,MAAM,QAAA,GAAWA,eAAUC,eAAU,CAAA;AAK9B,MAAM,qBAAA,CAAkE;AAAA,EACrE,IAAA,GAAO,KAAA;AAAA,EACE,MAAA;AAAA,EACA,OAAA;AAAA,EACD,IAAA;AAAA,EAEhB,WAAA,CAAY,MAAA,EAAkC,OAAA,EAAiB,IAAA,EAAc;AAC3E,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA;AAAA,EAGQ,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAE1D,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,CAAE,IAAA;AAAA,UACrB,SAAS,MAAMC,2BAAA,CAAW,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAAA,UAC7C,cAAA,EAAgB,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,EAAG;AAAA,SACjC,CAAA;AAAA,MACH;AAAA,IACF;AAEA,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA0C;AAC9C,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCC,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOC,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAEjE,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,MAAM,QAAA,GAAWA,8BAAa,IAAA,CAAK,GAAA,EAAK,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAC3D,QAAA,MAAMD,mBAAA,CAAG,MAAME,oBAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACrD,QAAA,MAAM,QAAA,CAAS,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,EAAMF,mBAAA,CAAG,iBAAA,CAAkB,QAAQ,CAAC,CAAA;AAAA,MACpE;AAAA,IACF;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
+ {"version":3,"file":"ReadableArrayResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/ReadableArrayResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n resolveSafeChildPath,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport platformPath, { dirname } from 'path';\nimport getRawBody from 'raw-body';\nimport fs from 'fs-extra';\nimport { promisify } from 'util';\nimport tar from 'tar';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport { FromReadableArrayOptions } from '../types';\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a array of Readable objects into a tree response reader.\n */\nexport class ReadableArrayResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n private readonly stream: FromReadableArrayOptions;\n private readonly workDir: string;\n public readonly etag: string;\n\n constructor(stream: FromReadableArrayOptions, workDir: string, etag: string) {\n this.stream = stream;\n this.workDir = workDir;\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n files.push({\n path: this.stream[i].path,\n content: () => getRawBody(this.stream[i].data),\n lastModifiedAt: this.stream[i]?.lastModifiedAt,\n });\n }\n }\n\n return files;\n }\n\n async archive(): Promise<NodeJS.ReadableStream> {\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n for (let i = 0; i < this.stream.length; i++) {\n if (!this.stream[i].path.endsWith('/')) {\n const filePath = resolveSafeChildPath(dir, this.stream[i].path);\n await fs.mkdir(dirname(filePath), { recursive: true });\n await pipeline(this.stream[i].data, fs.createWriteStream(filePath));\n }\n }\n\n return dir;\n 
}\n}\n"],"names":["promisify","pipelineCb","getRawBody","tar","concatStream","Readable","fs","platformPath","resolveSafeChildPath","dirname"],"mappings":";;;;;;;;;;;;;;;;;;;AA+BA,MAAM,QAAA,GAAWA,eAAUC,eAAU,CAAA;AAK9B,MAAM,qBAAA,CAAkE;AAAA,EACrE,IAAA,GAAO,KAAA;AAAA,EACE,MAAA;AAAA,EACA,OAAA;AAAA,EACD,IAAA;AAAA,EAEhB,WAAA,CAAY,MAAA,EAAkC,OAAA,EAAiB,IAAA,EAAc;AAC3E,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA;AAAA,EAGQ,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAE1D,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,CAAE,IAAA;AAAA,UACrB,SAAS,MAAMC,2BAAA,CAAW,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAAA,UAC7C,cAAA,EAAgB,IAAA,CAAK,MAAA,CAAO,CAAC,CAAA,EAAG;AAAA,SACjC,CAAA;AAAA,MACH;AAAA,IACF;AAEA,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA0C;AAC9C,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCC,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOC,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAEjE,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AAC3C,MAAA,IAAI,CAAC,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,CAAK,QAAA,CAAS,GAAG,CAAA,EAAG;AACtC,QAAA,MAAM,WAAWC,qCAAA,CAAqB,GAAA,EAAK,KAAK,MAAA,CAAO,CAAC,EAAE,IAAI,CAAA;AAC9D,QAAA,MAAMF,mBAAA,CAAG,MAAMG,oBAAA,CAAQ,QAAQ,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACrD,QAAA,MAAM,QAAA,CAAS,KAAK,MAAA,CAAO,CAAC,EAAE,IAAA,EAAMH,mBAAA,CAAG,iBAAA,CAAkB,QAAQ,CAAC,CAAA;AAAA,MACpE;AAAA,IACF;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
@@ -1,5 +1,6 @@
  'use strict';
 
+ var backendPluginApi = require('@backstage/backend-plugin-api');
  var concatStream = require('concat-stream');
  var fs = require('fs-extra');
  var platformPath = require('path');
@@ -121,6 +122,17 @@ class TarArchiveResponse {
  if (filterError) {
  return false;
  }
+ const entry = stat;
+ if ((entry.type === "SymbolicLink" || entry.type === "Link") && entry.linkpath) {
+ const strippedPath = path.split("/").slice(strip).join("/");
+ const linkDir = platformPath__default.default.dirname(
+ platformPath__default.default.join(dir, strippedPath)
+ );
+ const targetPath = platformPath__default.default.resolve(linkDir, entry.linkpath);
+ if (!backendPluginApi.isChildPath(dir, targetPath)) {
+ return false;
+ }
+ }
  const relativePath = this.stripFirstDirectory ? util$1.stripFirstDirectoryFromPath(path) : path;
  if (this.subPath && !relativePath.startsWith(this.subPath)) {
  return false;
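The added filter above also drops tar entries that are symlinks or hard links whose target resolves outside the extraction directory, using isChildPath. A small sketch of that containment check; the entry and link target are made-up examples, and the strip-components handling from the real filter is omitted.

    import platformPath from 'path';
    import { isChildPath } from '@backstage/backend-plugin-api';

    const dir = '/tmp/backstage-extract';

    // Hypothetical tar entry: a symlink at docs/link pointing at ../../../etc/passwd
    const entryPath = 'docs/link';
    const linkpath = '../../../etc/passwd';

    const linkDir = platformPath.dirname(platformPath.join(dir, entryPath));
    const targetPath = platformPath.resolve(linkDir, linkpath); // => '/etc/passwd'

    // isChildPath(base, path) is true only when `path` stays within `base`,
    // so this link would be filtered out during extraction.
    isChildPath(dir, targetPath); // => false for this example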
@@ -1 +1 @@
- {"version":3,"file":"TarArchiveResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/TarArchiveResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport fs from 'fs-extra';\nimport platformPath from 'path';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport tar, { Parse, ParseStream, ReadEntry } from 'tar';\nimport { promisify } from 'util';\nimport { stripFirstDirectoryFromPath } from './util';\n\n// Tar types for `Parse` is not a proper constructor, but it should be\nconst TarParseStream = Parse as unknown as { new (): ParseStream };\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a tar archive stream into a tree response reader.\n */\nexport class TarArchiveResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n private readonly stream: Readable;\n private readonly subPath: string;\n private readonly workDir: string;\n public readonly etag: string;\n private readonly filter?: (path: string, info: { size: number }) => boolean;\n private readonly stripFirstDirectory: boolean;\n\n constructor(\n stream: Readable,\n subPath: string,\n workDir: string,\n etag: string,\n filter?: (path: string, info: { size: number }) => boolean,\n stripFirstDirectory: boolean = true,\n ) {\n this.stream = stream;\n this.subPath = subPath;\n this.workDir = workDir;\n this.etag = etag;\n this.filter = filter;\n this.stripFirstDirectory = stripFirstDirectory;\n if (subPath) {\n if (!subPath.endsWith('/')) {\n this.subPath += '/';\n }\n if (subPath.startsWith('/')) {\n throw new TypeError(\n `TarArchiveResponse subPath must not start with a /, got '${subPath}'`,\n );\n }\n }\n\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n const parser = new TarParseStream();\n\n parser.on('entry', (entry: ReadEntry & Readable) => {\n if (entry.type === 'Directory') {\n entry.resume();\n return;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(entry.path)\n : entry.path;\n\n if (this.subPath) {\n if (!relativePath.startsWith(this.subPath)) {\n entry.resume();\n return;\n }\n }\n\n const path = relativePath.slice(this.subPath.length);\n if (this.filter) {\n if (!this.filter(path, { size: entry.remain })) {\n entry.resume();\n return;\n }\n }\n\n const content = new Promise<Buffer>(async resolve => {\n await pipeline(entry, concatStream(resolve));\n });\n\n files.push({\n path,\n content: () => content,\n });\n\n entry.resume();\n });\n\n await pipeline(this.stream, parser);\n\n return files;\n }\n\n async archive(): Promise<Readable> {\n if (!this.subPath) {\n this.onlyOnce();\n\n return this.stream;\n }\n\n // TODO(Rugvip): method for repacking a tar with a subpath is to simply extract into a\n // tmp dir and recreate the archive. Would be nicer to stream things instead.\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n // Equivalent of tar --strip-components=N\n // When no subPath is given, remove just 1 top level directory\n let strip = this.subPath ? this.subPath.split('/').length : 1;\n if (!this.stripFirstDirectory) {\n strip--;\n }\n\n let filterError: Error | undefined = undefined;\n await pipeline(\n this.stream,\n tar.extract({\n strip,\n cwd: dir,\n filter: (path, stat) => {\n // Filter errors will short-circuit the rest of the filtering and then throw\n if (filterError) {\n return false;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(path)\n : path;\n if (this.subPath && !relativePath.startsWith(this.subPath)) {\n return false;\n }\n if (this.filter) {\n const innerPath = path.split('/').slice(strip).join('/');\n try {\n return this.filter(innerPath, { size: stat.size });\n } catch (error) {\n filterError = error;\n return false;\n }\n }\n return true;\n },\n }),\n );\n\n if (filterError) {\n // If the dir was provided we don't want to remove it, but if it wasn't it means\n // we created a temporary directory and we should remove it.\n if (!options?.targetDir) {\n await fs.remove(dir).catch(() => {});\n }\n throw filterError;\n }\n\n return dir;\n }\n}\n"],"names":["Parse","promisify","pipelineCb","stripFirstDirectoryFromPath","concatStream","tar","Readable","fs","platformPath"],"mappings":";;;;;;;;;;;;;;;;;AA8BA,MAAM,cAAA,GAAiBA,SAAA;AAEvB,MAAM,QAAA,GAAWC,eAAUC,eAAU,CAAA;AAK9B,MAAM,kBAAA,CAA+D;AAAA,EAClE,IAAA,GAAO,KAAA;AAAA,EACE,MAAA;AAAA,EACA,OAAA;AAAA,EACA,OAAA;AAAA,EACD,IAAA;AAAA,EACC,MAAA;AAAA,EACA,mBAAA;AAAA,EAEjB,YACE,MAAA,EACA,OAAA,EACA,SACA,IAAA,EACA,MAAA,EACA,sBAA+B,IAAA,EAC/B;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,mBAAA,GAAsB,mBAAA;AAC3B,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAI,CAAC,OAAA,CAAQ,QAAA,CAAS,GAAG,CAAA,EAAG;AAC1B,QAAA,IAAA,CAAK,OAAA,IAAW,GAAA;AAAA,MAClB;AACA,MAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,GAAG,CAAA,EAAG;AAC3B,QAAA,MAAM,IAAI,SAAA;AAAA,UACR,4DAA4D,OAAO,CAAA,CAAA;AAAA,SACrE;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA;AAAA,EAGQ,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAC1D,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAElC,IAAA,MAAA,CAAO,EAAA,CAAG,OAAA,EAAS,CAAC,KAAA,KAAgC;AAClD,MAAA,IAAI,KAAA,CAAM,SAAS,WAAA,EAAa;AAC9B,QAAA,KAAA,CAAM,MAAA,EAAO;AACb,QAAA;AAAA,MACF;AAIA,MAAA,MAAM,eAAe,IAAA,CAAK,mBAAA,GACtBC,mCAA4B,KAAA,CAAM,IAAI,IACtC,KAAA,CAAM,IAAA;AAEV,MAAA,IAAI,KAAK,OAAA,EAAS;AAChB,QAAA,IAAI,CAAC,YAAA,CAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,IAAA,GAAO,YAAA,CAAa,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAM,CAAA;AACnD,MAAA,IAAI,KAAK,MAAA,EAAQ;AACf,QAAA,IAAI,CAAC,KAAK,MAAA,CAAO,IAAA,EAAM,EAAE,IAAA,EAAM,KAAA,CAAM,MAAA,EAAQ,CAAA,EAAG;AAC9C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACnD,QAAA,MAAM,QAAA,CAAS,KAAA,EAAOC,6BAAA,CAAa,OAAO,CAAC,CAAA;AAAA,MAC7C,CAAC,CAAA;AAED,MAAA,KAAA,CAAM,IAAA,CAAK;AAAA,QACT,IAAA;AAAA,QACA,SAAS,MAAM;AAAA,OAChB,CAAA;AAED,MAAA,KAAA,CAAM,MAAA,EAAO;AAAA,IACf,CAAC,CAAA;AAED,IAAA,MAAM,QAAA,CAAS,IAAA,CAAK,MAAA,EAAQ,MAAM,CAAA;AAElC,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA6B;AACjC,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,QAAA,EAAS;AAEd,MAAA,OAAO,IAAA,CAAK,MAAA;AAAA,IACd;AAIA,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCD,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOE,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,IACR,MAAMA,mBAA
A,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAIjE,IAAA,IAAI,KAAA,GAAQ,KAAK,OAAA,GAAU,IAAA,CAAK,QAAQ,KAAA,CAAM,GAAG,EAAE,MAAA,GAAS,CAAA;AAC5D,IAAA,IAAI,CAAC,KAAK,mBAAA,EAAqB;AAC7B,MAAA,KAAA,EAAA;AAAA,IACF;AAEA,IAAA,IAAI,WAAA,GAAiC,MAAA;AACrC,IAAA,MAAM,QAAA;AAAA,MACJ,IAAA,CAAK,MAAA;AAAA,MACLH,qBAAI,OAAA,CAAQ;AAAA,QACV,KAAA;AAAA,QACA,GAAA,EAAK,GAAA;AAAA,QACL,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAA,KAAS;AAEtB,UAAA,IAAI,WAAA,EAAa;AACf,YAAA,OAAO,KAAA;AAAA,UACT;AAIA,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,mBAAA,GACtBF,kCAAA,CAA4B,IAAI,CAAA,GAChC,IAAA;AACJ,UAAA,IAAI,KAAK,OAAA,IAAW,CAAC,aAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1D,YAAA,OAAO,KAAA;AAAA,UACT;AACA,UAAA,IAAI,KAAK,MAAA,EAAQ;AACf,YAAA,MAAM,SAAA,GAAY,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AACvD,YAAA,IAAI;AACF,cAAA,OAAO,KAAK,MAAA,CAAO,SAAA,EAAW,EAAE,IAAA,EAAM,IAAA,CAAK,MAAM,CAAA;AAAA,YACnD,SAAS,KAAA,EAAO;AACd,cAAA,WAAA,GAAc,KAAA;AACd,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AACA,UAAA,OAAO,IAAA;AAAA,QACT;AAAA,OACD;AAAA,KACH;AAEA,IAAA,IAAI,WAAA,EAAa;AAGf,MAAA,IAAI,CAAC,SAAS,SAAA,EAAW;AACvB,QAAA,MAAMI,mBAAA,CAAG,MAAA,CAAO,GAAG,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MACrC;AACA,MAAA,MAAM,WAAA;AAAA,IACR;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
+ {"version":3,"file":"TarArchiveResponse.cjs.js","sources":["../../../../../src/entrypoints/urlReader/lib/tree/TarArchiveResponse.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n isChildPath,\n UrlReaderServiceReadTreeResponse,\n UrlReaderServiceReadTreeResponseDirOptions,\n UrlReaderServiceReadTreeResponseFile,\n} from '@backstage/backend-plugin-api';\nimport concatStream from 'concat-stream';\nimport fs from 'fs-extra';\nimport platformPath from 'path';\nimport { pipeline as pipelineCb, Readable } from 'stream';\nimport tar, { FileStat, Parse, ParseStream, ReadEntry } from 'tar';\nimport { promisify } from 'util';\nimport { stripFirstDirectoryFromPath } from './util';\n\n// Tar types for `Parse` is not a proper constructor, but it should be\nconst TarParseStream = Parse as unknown as { new (): ParseStream };\n\nconst pipeline = promisify(pipelineCb);\n\n/**\n * Wraps a tar archive stream into a tree response reader.\n */\nexport class TarArchiveResponse implements UrlReaderServiceReadTreeResponse {\n private read = false;\n private readonly stream: Readable;\n private readonly subPath: string;\n private readonly workDir: string;\n public readonly etag: string;\n private readonly filter?: (path: string, info: { size: number }) => boolean;\n private readonly stripFirstDirectory: boolean;\n\n constructor(\n stream: Readable,\n subPath: string,\n workDir: string,\n etag: string,\n filter?: (path: string, info: { size: number }) => boolean,\n stripFirstDirectory: boolean = true,\n ) {\n this.stream = stream;\n this.subPath = subPath;\n this.workDir = workDir;\n this.etag = etag;\n this.filter = filter;\n this.stripFirstDirectory = stripFirstDirectory;\n if (subPath) {\n if (!subPath.endsWith('/')) {\n this.subPath += '/';\n }\n if (subPath.startsWith('/')) {\n throw new TypeError(\n `TarArchiveResponse subPath must not start with a /, got '${subPath}'`,\n );\n }\n }\n\n this.etag = etag;\n }\n\n // Make sure the input stream is only read once\n private onlyOnce() {\n if (this.read) {\n throw new Error('Response has already been read');\n }\n this.read = true;\n }\n\n async files(): Promise<UrlReaderServiceReadTreeResponseFile[]> {\n this.onlyOnce();\n\n const files = Array<UrlReaderServiceReadTreeResponseFile>();\n const parser = new TarParseStream();\n\n parser.on('entry', (entry: ReadEntry & Readable) => {\n if (entry.type === 'Directory') {\n entry.resume();\n return;\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(entry.path)\n : entry.path;\n\n if (this.subPath) {\n if (!relativePath.startsWith(this.subPath)) {\n entry.resume();\n return;\n }\n }\n\n const path = relativePath.slice(this.subPath.length);\n if (this.filter) {\n if (!this.filter(path, { size: entry.remain })) {\n entry.resume();\n return;\n }\n }\n\n const content = new Promise<Buffer>(async resolve => {\n await pipeline(entry, concatStream(resolve));\n });\n\n files.push({\n path,\n content: () => content,\n });\n\n entry.resume();\n });\n\n await pipeline(this.stream, parser);\n\n return files;\n }\n\n async archive(): Promise<Readable> {\n if (!this.subPath) {\n this.onlyOnce();\n\n return this.stream;\n }\n\n // TODO(Rugvip): method for repacking a tar with a subpath is to simply extract into a\n // tmp dir and recreate the archive. Would be nicer to stream things instead.\n const tmpDir = await this.dir();\n\n try {\n const data = await new Promise<Buffer>(async resolve => {\n await pipeline(\n tar.create({ cwd: tmpDir }, ['']),\n concatStream(resolve),\n );\n });\n return Readable.from(data);\n } finally {\n await fs.remove(tmpDir);\n }\n }\n\n async dir(\n options?: UrlReaderServiceReadTreeResponseDirOptions,\n ): Promise<string> {\n this.onlyOnce();\n\n const dir =\n options?.targetDir ??\n (await fs.mkdtemp(platformPath.join(this.workDir, 'backstage-')));\n\n // Equivalent of tar --strip-components=N\n // When no subPath is given, remove just 1 top level directory\n let strip = this.subPath ? this.subPath.split('/').length : 1;\n if (!this.stripFirstDirectory) {\n strip--;\n }\n\n let filterError: Error | undefined = undefined;\n await pipeline(\n this.stream,\n tar.extract({\n strip,\n cwd: dir,\n filter: (path, stat) => {\n // Filter errors will short-circuit the rest of the filtering and then throw\n if (filterError) {\n return false;\n }\n\n // Block symlinks/hardlinks that escape the extraction directory\n const entry = stat as FileStat & { type?: string; linkpath?: string };\n if (\n (entry.type === 'SymbolicLink' || entry.type === 'Link') &&\n entry.linkpath\n ) {\n const strippedPath = path.split('/').slice(strip).join('/');\n const linkDir = platformPath.dirname(\n platformPath.join(dir, strippedPath),\n );\n const targetPath = platformPath.resolve(linkDir, entry.linkpath);\n if (!isChildPath(dir, targetPath)) {\n return false;\n }\n }\n\n // File path relative to the root extracted directory. Will remove the\n // top level dir name from the path since its name is hard to predetermine.\n const relativePath = this.stripFirstDirectory\n ? 
stripFirstDirectoryFromPath(path)\n : path;\n if (this.subPath && !relativePath.startsWith(this.subPath)) {\n return false;\n }\n if (this.filter) {\n const innerPath = path.split('/').slice(strip).join('/');\n try {\n return this.filter(innerPath, { size: stat.size });\n } catch (error) {\n filterError = error;\n return false;\n }\n }\n return true;\n },\n }),\n );\n\n if (filterError) {\n // If the dir was provided we don't want to remove it, but if it wasn't it means\n // we created a temporary directory and we should remove it.\n if (!options?.targetDir) {\n await fs.remove(dir).catch(() => {});\n }\n throw filterError;\n }\n\n return dir;\n }\n}\n"],"names":["Parse","promisify","pipelineCb","stripFirstDirectoryFromPath","concatStream","tar","Readable","fs","platformPath","isChildPath"],"mappings":";;;;;;;;;;;;;;;;;;AA+BA,MAAM,cAAA,GAAiBA,SAAA;AAEvB,MAAM,QAAA,GAAWC,eAAUC,eAAU,CAAA;AAK9B,MAAM,kBAAA,CAA+D;AAAA,EAClE,IAAA,GAAO,KAAA;AAAA,EACE,MAAA;AAAA,EACA,OAAA;AAAA,EACA,OAAA;AAAA,EACD,IAAA;AAAA,EACC,MAAA;AAAA,EACA,mBAAA;AAAA,EAEjB,YACE,MAAA,EACA,OAAA,EACA,SACA,IAAA,EACA,MAAA,EACA,sBAA+B,IAAA,EAC/B;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,mBAAA,GAAsB,mBAAA;AAC3B,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAI,CAAC,OAAA,CAAQ,QAAA,CAAS,GAAG,CAAA,EAAG;AAC1B,QAAA,IAAA,CAAK,OAAA,IAAW,GAAA;AAAA,MAClB;AACA,MAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,GAAG,CAAA,EAAG;AAC3B,QAAA,MAAM,IAAI,SAAA;AAAA,UACR,4DAA4D,OAAO,CAAA,CAAA;AAAA,SACrE;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA;AAAA,EAGQ,QAAA,GAAW;AACjB,IAAA,IAAI,KAAK,IAAA,EAAM;AACb,MAAA,MAAM,IAAI,MAAM,gCAAgC,CAAA;AAAA,IAClD;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAM,KAAA,GAAyD;AAC7D,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,QAAQ,KAAA,EAA4C;AAC1D,IAAA,MAAM,MAAA,GAAS,IAAI,cAAA,EAAe;AAElC,IAAA,MAAA,CAAO,EAAA,CAAG,OAAA,EAAS,CAAC,KAAA,KAAgC;AAClD,MAAA,IAAI,KAAA,CAAM,SAAS,WAAA,EAAa;AAC9B,QAAA,KAAA,CAAM,MAAA,EAAO;AACb,QAAA;AAAA,MACF;AAIA,MAAA,MAAM,eAAe,IAAA,CAAK,mBAAA,GACtBC,mCAA4B,KAAA,CAAM,IAAI,IACtC,KAAA,CAAM,IAAA;AAEV,MAAA,IAAI,KAAK,OAAA,EAAS;AAChB,QAAA,IAAI,CAAC,YAAA,CAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,IAAA,GAAO,YAAA,CAAa,KAAA,CAAM,IAAA,CAAK,QAAQ,MAAM,CAAA;AACnD,MAAA,IAAI,KAAK,MAAA,EAAQ;AACf,QAAA,IAAI,CAAC,KAAK,MAAA,CAAO,IAAA,EAAM,EAAE,IAAA,EAAM,KAAA,CAAM,MAAA,EAAQ,CAAA,EAAG;AAC9C,UAAA,KAAA,CAAM,MAAA,EAAO;AACb,UAAA;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACnD,QAAA,MAAM,QAAA,CAAS,KAAA,EAAOC,6BAAA,CAAa,OAAO,CAAC,CAAA;AAAA,MAC7C,CAAC,CAAA;AAED,MAAA,KAAA,CAAM,IAAA,CAAK;AAAA,QACT,IAAA;AAAA,QACA,SAAS,MAAM;AAAA,OAChB,CAAA;AAED,MAAA,KAAA,CAAM,MAAA,EAAO;AAAA,IACf,CAAC,CAAA;AAED,IAAA,MAAM,QAAA,CAAS,IAAA,CAAK,MAAA,EAAQ,MAAM,CAAA;AAElC,IAAA,OAAO,KAAA;AAAA,EACT;AAAA,EAEA,MAAM,OAAA,GAA6B;AACjC,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,QAAA,EAAS;AAEd,MAAA,OAAO,IAAA,CAAK,MAAA;AAAA,IACd;AAIA,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,GAAA,EAAI;AAE9B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,IAAI,OAAA,CAAgB,OAAM,OAAA,KAAW;AACtD,QAAA,MAAM,QAAA;AAAA,UACJC,oBAAA,CAAI,OAAO,EAAE,GAAA,EAAK,QAAO,EAAG,CAAC,EAAE,CAAC,CAAA;AAAA,UAChCD,8BAAa,OAAO;AAAA,SACtB;AAAA,MACF,CAAC,CAAA;AACD,MAAA,OAAOE,eAAA,CAAS,KAAK,IAAI,CAAA;AAAA,IAC3B,CAAA,SAAE;AACA,MAAA,MAAMC,mBAAA,CAAG,OAAO,MAAM,CAAA;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,IACJ,OAAA,EACiB;AACjB,IAAA,IAAA,CAAK,QAAA,EAAS;AAEd,IAAA,MAAM,GAAA,GACJ,OAAA,EAAS,SAAA,
IACR,MAAMA,mBAAA,CAAG,OAAA,CAAQC,6BAAA,CAAa,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,YAAY,CAAC,CAAA;AAIjE,IAAA,IAAI,KAAA,GAAQ,KAAK,OAAA,GAAU,IAAA,CAAK,QAAQ,KAAA,CAAM,GAAG,EAAE,MAAA,GAAS,CAAA;AAC5D,IAAA,IAAI,CAAC,KAAK,mBAAA,EAAqB;AAC7B,MAAA,KAAA,EAAA;AAAA,IACF;AAEA,IAAA,IAAI,WAAA,GAAiC,MAAA;AACrC,IAAA,MAAM,QAAA;AAAA,MACJ,IAAA,CAAK,MAAA;AAAA,MACLH,qBAAI,OAAA,CAAQ;AAAA,QACV,KAAA;AAAA,QACA,GAAA,EAAK,GAAA;AAAA,QACL,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAA,KAAS;AAEtB,UAAA,IAAI,WAAA,EAAa;AACf,YAAA,OAAO,KAAA;AAAA,UACT;AAGA,UAAA,MAAM,KAAA,GAAQ,IAAA;AACd,UAAA,IAAA,CACG,MAAM,IAAA,KAAS,cAAA,IAAkB,MAAM,IAAA,KAAS,MAAA,KACjD,MAAM,QAAA,EACN;AACA,YAAA,MAAM,YAAA,GAAe,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AAC1D,YAAA,MAAM,UAAUG,6BAAA,CAAa,OAAA;AAAA,cAC3BA,6BAAA,CAAa,IAAA,CAAK,GAAA,EAAK,YAAY;AAAA,aACrC;AACA,YAAA,MAAM,UAAA,GAAaA,6BAAA,CAAa,OAAA,CAAQ,OAAA,EAAS,MAAM,QAAQ,CAAA;AAC/D,YAAA,IAAI,CAACC,4BAAA,CAAY,GAAA,EAAK,UAAU,CAAA,EAAG;AACjC,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AAIA,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,mBAAA,GACtBN,kCAAA,CAA4B,IAAI,CAAA,GAChC,IAAA;AACJ,UAAA,IAAI,KAAK,OAAA,IAAW,CAAC,aAAa,UAAA,CAAW,IAAA,CAAK,OAAO,CAAA,EAAG;AAC1D,YAAA,OAAO,KAAA;AAAA,UACT;AACA,UAAA,IAAI,KAAK,MAAA,EAAQ;AACf,YAAA,MAAM,SAAA,GAAY,KAAK,KAAA,CAAM,GAAG,EAAE,KAAA,CAAM,KAAK,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA;AACvD,YAAA,IAAI;AACF,cAAA,OAAO,KAAK,MAAA,CAAO,SAAA,EAAW,EAAE,IAAA,EAAM,IAAA,CAAK,MAAM,CAAA;AAAA,YACnD,SAAS,KAAA,EAAO;AACd,cAAA,WAAA,GAAc,KAAA;AACd,cAAA,OAAO,KAAA;AAAA,YACT;AAAA,UACF;AACA,UAAA,OAAO,IAAA;AAAA,QACT;AAAA,OACD;AAAA,KACH;AAEA,IAAA,IAAI,WAAA,EAAa;AAGf,MAAA,IAAI,CAAC,SAAS,SAAA,EAAW;AACvB,QAAA,MAAMI,mBAAA,CAAG,MAAA,CAAO,GAAG,CAAA,CAAE,MAAM,MAAM;AAAA,QAAC,CAAC,CAAA;AAAA,MACrC;AACA,MAAA,MAAM,WAAA;AAAA,IACR;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AACF;;;;"}
@@ -2,7 +2,7 @@
2
2
 
3
3
  Object.defineProperty(exports, '__esModule', { value: true });
4
4
 
5
- var version = "0.13.1";
5
+ var version = "0.13.2";
6
6
  var packageinfo = {
7
7
  version: version};
8
8
 
@@ -3,6 +3,7 @@ import { RootConfigService, LoggerService, UrlReaderServiceReadTreeResponse, Url
3
3
  import { AzureIntegration, AzureDevOpsCredentialsProvider, BitbucketCloudIntegration, BitbucketIntegration, BitbucketServerIntegration, GerritIntegration, GithubIntegration, GithubCredentialsProvider, GitLabIntegration, GiteaIntegration, HarnessIntegration, AwsS3Integration, AzureCredentialsManager, AzureBlobStorageIntergation } from '@backstage/integration';
4
4
  import { Readable } from 'stream';
5
5
  import { AwsCredentialsManager } from '@backstage/integration-aws-node';
6
+ import { Config } from '@backstage/config';
6
7
 
7
8
  /**
8
9
  * A predicate that decides whether a specific {@link @backstage/backend-plugin-api#UrlReaderService} can handle a
@@ -374,6 +375,7 @@ declare class AzureBlobStorageUrlReader implements UrlReaderService {
374
375
  * @public
375
376
  */
376
377
  declare class FetchUrlReader implements UrlReaderService {
378
+ #private;
377
379
  /**
378
380
  * The factory creates a single reader that will be used for reading any URL that's listed
379
381
  * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing
@@ -387,6 +389,8 @@ declare class FetchUrlReader implements UrlReaderService {
387
389
  * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
388
390
  */
389
391
  static factory: ReaderFactory;
392
+ static fromConfig(config: Config): FetchUrlReader;
393
+ private constructor();
390
394
  read(url: string): Promise<Buffer>;
391
395
  readUrl(url: string, options?: UrlReaderServiceReadUrlOptions): Promise<UrlReaderServiceReadUrlResponse>;
392
396
  readTree(): Promise<UrlReaderServiceReadTreeResponse>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/backend-defaults",
3
- "version": "0.13.1",
3
+ "version": "0.13.2",
4
4
  "description": "Backend defaults used by Backstage backend apps",
5
5
  "backstage": {
6
6
  "role": "node-library"
@@ -285,7 +285,7 @@
285
285
  "devDependencies": {
286
286
  "@aws-sdk/util-stream-node": "^3.350.0",
287
287
  "@backstage/backend-plugin-api": "^1.5.0",
288
- "@backstage/backend-test-utils": "^1.10.0",
288
+ "@backstage/backend-test-utils": "^1.10.1",
289
289
  "@backstage/cli": "^0.34.5",
290
290
  "@google-cloud/cloud-sql-connector": "^1.4.0",
291
291
  "@types/archiver": "^6.0.0",