@backstage/plugin-search-backend-node 1.4.0 → 1.4.1-next.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @backstage/plugin-search-backend-node

+ ## 1.4.1-next.0
+
+ ### Patch Changes
+
+ - 7455dae: Use node prefix on native imports
+ - Updated dependencies
+   - @backstage/backend-plugin-api@1.7.0-next.0
+   - @backstage/plugin-search-common@1.2.22-next.0
+   - @backstage/plugin-permission-common@0.9.5-next.0
+   - @backstage/config@1.3.6
+   - @backstage/errors@1.2.7
+
  ## 1.4.0

  ### Minor Changes
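The `7455dae` patch is mechanical: imports of Node.js built-ins now use the explicit `node:` scheme. A minimal sketch of the before/after pattern for the `stream` module that the rest of this diff touches (nothing here is package-specific):

```ts
// Before the patch: bare built-in specifier.
// import { pipeline, Readable, Transform, Writable } from 'stream';

// After the patch: explicit node: scheme, which can only resolve to the
// built-in module and never to an npm package named "stream".
import { pipeline, Readable, Transform, Writable } from 'node:stream';
```

Both specifiers resolve to the same built-in module at runtime, so this reads as a stylistic/robustness change rather than a behavioral one.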
@@ -1,6 +1,6 @@
  'use strict';

- var stream = require('stream');
+ var node_stream = require('node:stream');
  var Scheduler = require('./Scheduler.cjs.js');

  class IndexBuilder {
@@ -95,7 +95,7 @@ class IndexBuilder {
    );
    const indexer = await this.searchEngine.getIndexer(type);
    return new Promise((resolve, reject) => {
-     stream.pipeline(
+     node_stream.pipeline(
        [collator, ...decorators, indexer],
        (error) => {
          if (error) {
@@ -1 +1 @@
- [IndexBuilder.cjs.js.map: source map whose embedded source imports pipeline and Transform from 'stream']
+ [IndexBuilder.cjs.js.map: regenerated source map; the embedded source now imports from 'node:stream' and the pipeline mapping is updated to match]
@@ -1 +1 @@
- [NewlineDelimitedJsonCollatorFactory.cjs.js.map: source map whose embedded source imports Readable from 'stream']
+ [NewlineDelimitedJsonCollatorFactory.cjs.js.map: regenerated source map; the embedded source now imports Readable from 'node:stream']
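The factory's own TSDoc (carried in the source map summarized above) includes a usage example; it is reproduced here in sketch form, with `indexBuilder`, `schedule`, and `env` assumed to exist in the caller's setup:

```ts
import { NewlineDelimitedJsonCollatorFactory } from '@backstage/plugin-search-backend-node';

// From the factory's TSDoc: index the latest xyz-*.ndjson file in a GCS bucket.
indexBuilder.addCollator({
  schedule,
  factory: NewlineDelimitedJsonCollatorFactory.fromConfig(env.config, {
    type: 'techdocs',
    searchPattern: 'https://storage.cloud.google.com/bucket/xyz-*',
    reader: env.reader,
    logger: env.logger,
  }),
});
```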
@@ -3,7 +3,7 @@
  var lunr = require('lunr');
  var BatchSearchEngineIndexer = require('../indexing/BatchSearchEngineIndexer.cjs.js');
  require('@backstage/errors');
- require('stream');
+ require('node:stream');

  function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }

package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { SearchQuery, IndexableResultSet, DocumentCollatorFactory, DocumentDecoratorFactory, DocumentTypeInfo, IndexableDocument } from '@backstage/plugin-search-common';
  import { LoggerService, SchedulerServiceTaskFunction, SchedulerServiceTaskRunner, BackstageCredentials, UrlReaderService } from '@backstage/backend-plugin-api';
- import { Writable, Readable, Transform } from 'stream';
+ import { Writable, Readable, Transform } from 'node:stream';
  import { Config } from '@backstage/config';
  import { Permission } from '@backstage/plugin-permission-common';
  import lunr from 'lunr';
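One point worth noting about this public `.d.ts` change: `'stream'` and `'node:stream'` resolve to the same built-in module at runtime, so the `Writable`/`Readable`/`Transform` types threaded through these declarations remain interchangeable for consumers that still import from the unprefixed specifier. A quick check (not part of the package) illustrating that:

```ts
import * as legacyStream from 'stream';
import * as nodeStream from 'node:stream';

// Both specifiers return the same built-in module object, so the classes are
// identical and instanceof checks work across the two import styles.
console.log(legacyStream.Writable === nodeStream.Writable); // true
console.log(new legacyStream.Readable() instanceof nodeStream.Readable); // true
```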
@@ -1,9 +1,9 @@
  'use strict';

  var errors = require('@backstage/errors');
- var stream = require('stream');
+ var node_stream = require('node:stream');

- class BatchSearchEngineIndexer extends stream.Writable {
+ class BatchSearchEngineIndexer extends node_stream.Writable {
    batchSize;
    currentBatch = [];
    constructor(options) {
@@ -1 +1 @@
- [BatchSearchEngineIndexer.cjs.js.map: source map whose embedded source imports Writable from 'stream']
+ [BatchSearchEngineIndexer.cjs.js.map: regenerated source map; the embedded source now imports Writable from 'node:stream' and the Writable mapping is updated to match]
@@ -1,9 +1,9 @@
  'use strict';

  var errors = require('@backstage/errors');
- var stream = require('stream');
+ var node_stream = require('node:stream');

- class DecoratorBase extends stream.Transform {
+ class DecoratorBase extends node_stream.Transform {
    constructor() {
      super({ objectMode: true });
    }
@@ -1 +1 @@
- [DecoratorBase.cjs.js.map: source map whose embedded source imports Transform from 'stream']
+ [DecoratorBase.cjs.js.map: regenerated source map; the embedded source now imports Transform from 'node:stream' and the Transform mapping is updated to match]
@@ -1,6 +1,6 @@
  'use strict';

- var stream = require('stream');
+ var node_stream = require('node:stream');

  class TestPipeline {
    collator;
@@ -22,13 +22,13 @@ class TestPipeline {
   * methods to create a test pipeline instead.
   */
  static withSubject(subject) {
-   if (subject instanceof stream.Transform) {
+   if (subject instanceof node_stream.Transform) {
      return new TestPipeline({ decorator: subject });
    }
-   if (subject instanceof stream.Writable) {
+   if (subject instanceof node_stream.Writable) {
      return new TestPipeline({ indexer: subject });
    }
-   if (subject.readable || subject instanceof stream.Readable) {
+   if (subject.readable || subject instanceof node_stream.Readable) {
      return new TestPipeline({ collator: subject });
    }
    throw new Error(
@@ -81,7 +81,7 @@ class TestPipeline {
    if (this.collator) {
      throw new Error("Cannot provide documents when testing a collator.");
    }
-   this.collator = new stream.Readable({ objectMode: true });
+   this.collator = new node_stream.Readable({ objectMode: true });
    this.collator._read = () => {
    };
    process.nextTick(() => {
@@ -104,7 +104,7 @@ class TestPipeline {
      );
    }
    if (!this.indexer) {
-     this.indexer = new stream.Writable({ objectMode: true });
+     this.indexer = new node_stream.Writable({ objectMode: true });
      this.indexer._write = (document, _, done) => {
        documents.push(document);
        done();
@@ -116,7 +116,7 @@ class TestPipeline {
      pipes.push(this.decorator);
    }
    pipes.push(this.indexer);
-   stream.pipeline(pipes, (error) => {
+   node_stream.pipeline(pipes, (error) => {
      done({
        error,
        documents
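The `TestPipeline` TSDoc (embedded in the source map that follows) shows the intended usage; its collator example is reproduced essentially verbatim below, with `yourCollatorFactory` standing in for a real collator factory, Jest assumed as the test runner, and the import path assumed from this package's public surface:

```ts
import { TestPipeline } from '@backstage/plugin-search-backend-node';

it('provides expected documents', async () => {
  const testSubject = await yourCollatorFactory.getCollator();
  const pipeline = TestPipeline.fromCollator(testSubject);

  const { documents } = await pipeline.execute();

  expect(documents).toHaveLength(2);
});
```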
@@ -1 +1 @@
1
- {"version":3,"file":"TestPipeline.cjs.js","sources":["../../src/test-utils/TestPipeline.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport { pipeline, Readable, Transform, Writable } from 'stream';\n\n/**\n * Object resolved after a test pipeline is executed.\n * @public\n */\nexport type TestPipelineResult = {\n /**\n * If an error was emitted by the pipeline, it will be set here.\n */\n error: unknown;\n\n /**\n * A list of documents collected at the end of the pipeline. If the subject\n * under test is an indexer, this will be an empty array (because your\n * indexer should have received the documents instead).\n */\n documents: IndexableDocument[];\n};\n\n/**\n * Test utility for Backstage Search collators, decorators, and indexers.\n *\n * @example\n * An example test checking that a collator provides expected documents.\n * ```\n * it('provides expected documents', async () => {\n * const testSubject = await yourCollatorFactory.getCollator();\n * const pipeline = TestPipeline.fromCollator(testSubject);\n *\n * const { documents } = await pipeline.execute();\n *\n * expect(documents).toHaveLength(2);\n * })\n * ```\n *\n * @example\n * An example test checking that a decorator behaves as expected.\n * ```\n * it('filters private documents', async () => {\n * const testSubject = await yourDecoratorFactory.getDecorator();\n * const pipeline = TestPipeline\n * .fromDecorator(testSubject)\n * .withDocuments([{ title: 'Private', location: '/private', text: '' }]);\n *\n * const { documents } = await pipeline.execute();\n *\n * expect(documents).toHaveLength(0);\n * })\n * ```\n *\n * @public\n */\nexport class TestPipeline {\n private collator?: Readable;\n private decorator?: Transform;\n private indexer?: Writable;\n\n private constructor({\n collator,\n decorator,\n indexer,\n }: {\n collator?: Readable;\n decorator?: Transform;\n indexer?: Writable;\n }) {\n this.collator = collator;\n this.decorator = decorator;\n this.indexer = indexer;\n }\n\n /**\n * Provide the collator, decorator, or indexer to be tested.\n *\n * @deprecated Use `fromCollator`, `fromDecorator` or `fromIndexer` static\n * methods to create a test pipeline instead.\n */\n static withSubject(subject: Readable | Transform | Writable) {\n if (subject instanceof Transform) {\n return new TestPipeline({ decorator: subject });\n }\n\n if (subject instanceof Writable) {\n return new TestPipeline({ indexer: subject });\n }\n\n if (subject.readable || subject instanceof Readable) {\n return new TestPipeline({ collator: subject });\n }\n\n throw new Error(\n 'Unknown test subject: are you passing a readable, writable, or transform stream?',\n );\n }\n\n /**\n * Create a test pipeline given a collator you want to test.\n */\n static fromCollator(collator: Readable) {\n return new TestPipeline({ collator });\n }\n\n /**\n * Add a collator to the test pipeline.\n 
*/\n withCollator(collator: Readable): this {\n this.collator = collator;\n return this;\n }\n\n /**\n * Create a test pipeline given a decorator you want to test.\n */\n static fromDecorator(decorator: Transform) {\n return new TestPipeline({ decorator });\n }\n\n /**\n * Add a decorator to the test pipeline.\n */\n withDecorator(decorator: Transform): this {\n this.decorator = decorator;\n return this;\n }\n\n /**\n * Create a test pipeline given an indexer you want to test.\n */\n static fromIndexer(indexer: Writable) {\n return new TestPipeline({ indexer });\n }\n\n /**\n * Add an indexer to the test pipeline.\n */\n withIndexer(indexer: Writable): this {\n this.indexer = indexer;\n return this;\n }\n\n /**\n * Provide documents for testing decorators and indexers.\n */\n withDocuments(documents: IndexableDocument[]): TestPipeline {\n if (this.collator) {\n throw new Error('Cannot provide documents when testing a collator.');\n }\n\n // Set a naive readable stream that just pushes all given documents.\n this.collator = new Readable({ objectMode: true });\n this.collator._read = () => {};\n process.nextTick(() => {\n documents.forEach(document => {\n this.collator!.push(document);\n });\n this.collator!.push(null);\n });\n\n return this;\n }\n\n /**\n * Execute the test pipeline so that you can make assertions about the result\n * or behavior of the given test subject.\n */\n async execute(): Promise<TestPipelineResult> {\n const documents: IndexableDocument[] = [];\n if (!this.collator) {\n throw new Error(\n 'Cannot execute pipeline without a collator or documents',\n );\n }\n\n // If we are here and there is no indexer, we are testing a collator or a\n // decorator. Set up a naive writable that captures documents in memory.\n if (!this.indexer) {\n this.indexer = new Writable({ objectMode: true });\n this.indexer._write = (document: IndexableDocument, _, done) => {\n documents.push(document);\n done();\n };\n }\n\n return new Promise<TestPipelineResult>(done => {\n const pipes: (Readable | Transform | Writable)[] = [this.collator!];\n if (this.decorator) {\n pipes.push(this.decorator);\n }\n pipes.push(this.indexer!);\n\n pipeline(pipes, (error: NodeJS.ErrnoException | null) => {\n done({\n error,\n documents,\n });\n });\n });\n 
}\n}\n"],"names":["Transform","Writable","Readable","pipeline"],"mappings":";;;;AAsEO,MAAM,YAAA,CAAa;AAAA,EAChB,QAAA;AAAA,EACA,SAAA;AAAA,EACA,OAAA;AAAA,EAEA,WAAA,CAAY;AAAA,IAClB,QAAA;AAAA,IACA,SAAA;AAAA,IACA;AAAA,GACF,EAIG;AACD,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,YAAY,OAAA,EAA0C;AAC3D,IAAA,IAAI,mBAAmBA,gBAAA,EAAW;AAChC,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,SAAA,EAAW,SAAS,CAAA;AAAA,IAChD;AAEA,IAAA,IAAI,mBAAmBC,eAAA,EAAU;AAC/B,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,OAAA,EAAS,SAAS,CAAA;AAAA,IAC9C;AAEA,IAAA,IAAI,OAAA,CAAQ,QAAA,IAAY,OAAA,YAAmBC,eAAA,EAAU;AACnD,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,QAAA,EAAU,SAAS,CAAA;AAAA,IAC/C;AAEA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,aAAa,QAAA,EAAoB;AACtC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,QAAA,EAAU,CAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,QAAA,EAA0B;AACrC,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAc,SAAA,EAAsB;AACzC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,SAAA,EAAW,CAAA;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,SAAA,EAA4B;AACxC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,OAAA,EAAmB;AACpC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,OAAA,EAAS,CAAA;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,OAAA,EAAyB;AACnC,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,SAAA,EAA8C;AAC1D,IAAA,IAAI,KAAK,QAAA,EAAU;AACjB,MAAA,MAAM,IAAI,MAAM,mDAAmD,CAAA;AAAA,IACrE;AAGA,IAAA,IAAA,CAAK,WAAW,IAAIA,eAAA,CAAS,EAAE,UAAA,EAAY,MAAM,CAAA;AACjD,IAAA,IAAA,CAAK,QAAA,CAAS,QAAQ,MAAM;AAAA,IAAC,CAAA;AAC7B,IAAA,OAAA,CAAQ,SAAS,MAAM;AACrB,MAAA,SAAA,CAAU,QAAQ,CAAA,QAAA,KAAY;AAC5B,QAAA,IAAA,CAAK,QAAA,CAAU,KAAK,QAAQ,CAAA;AAAA,MAC9B,CAAC,CAAA;AACD,MAAA,IAAA,CAAK,QAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IAC1B,CAAC,CAAA;AAED,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAA,GAAuC;AAC3C,IAAA,MAAM,YAAiC,EAAC;AACxC,IAAA,IAAI,CAAC,KAAK,QAAA,EAAU;AAClB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAIA,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,UAAU,IAAID,eAAA,CAAS,EAAE,UAAA,EAAY,MAAM,CAAA;AAChD,MAAA,IAAA,CAAK,OAAA,CAAQ,MAAA,GAAS,CAAC,QAAA,EAA6B,GAAG,IAAA,KAAS;AAC9D,QAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AACvB,QAAA,IAAA,EAAK;AAAA,MACP,CAAA;AAAA,IACF;AAEA,IAAA,OAAO,IAAI,QAA4B,CAAA,IAAA,KAAQ;AAC7C,MAAA,MAAM,KAAA,GAA6C,CAAC,IAAA,CAAK,QAAS,CAAA;AAClE,MAAA,IAAI,KAAK,SAAA,EAAW;AAClB,QAAA,KAAA,CAAM,IAAA,CAAK,KAAK,SAAS,CAAA;AAAA,MAC3B;AACA,MAAA,KAAA,CAAM,IAAA,CAAK,KAAK,OAAQ,CAAA;AAExB,MAAAE,eAAA,CAAS,KAAA,EAAO,CAAC,KAAA,KAAwC;AACvD,QAAA,IAAA,CAAK;AAAA,UACH,KAAA;AAAA,UACA;AAAA,SACD,CAAA;AAAA,MACH,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,EACH;AACF;;;;"}
+ {"version":3,"file":"TestPipeline.cjs.js","sources":["../../src/test-utils/TestPipeline.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport { pipeline, Readable, Transform, Writable } from 'node:stream';\n\n/**\n * Object resolved after a test pipeline is executed.\n * @public\n */\nexport type TestPipelineResult = {\n /**\n * If an error was emitted by the pipeline, it will be set here.\n */\n error: unknown;\n\n /**\n * A list of documents collected at the end of the pipeline. If the subject\n * under test is an indexer, this will be an empty array (because your\n * indexer should have received the documents instead).\n */\n documents: IndexableDocument[];\n};\n\n/**\n * Test utility for Backstage Search collators, decorators, and indexers.\n *\n * @example\n * An example test checking that a collator provides expected documents.\n * ```\n * it('provides expected documents', async () => {\n * const testSubject = await yourCollatorFactory.getCollator();\n * const pipeline = TestPipeline.fromCollator(testSubject);\n *\n * const { documents } = await pipeline.execute();\n *\n * expect(documents).toHaveLength(2);\n * })\n * ```\n *\n * @example\n * An example test checking that a decorator behaves as expected.\n * ```\n * it('filters private documents', async () => {\n * const testSubject = await yourDecoratorFactory.getDecorator();\n * const pipeline = TestPipeline\n * .fromDecorator(testSubject)\n * .withDocuments([{ title: 'Private', location: '/private', text: '' }]);\n *\n * const { documents } = await pipeline.execute();\n *\n * expect(documents).toHaveLength(0);\n * })\n * ```\n *\n * @public\n */\nexport class TestPipeline {\n private collator?: Readable;\n private decorator?: Transform;\n private indexer?: Writable;\n\n private constructor({\n collator,\n decorator,\n indexer,\n }: {\n collator?: Readable;\n decorator?: Transform;\n indexer?: Writable;\n }) {\n this.collator = collator;\n this.decorator = decorator;\n this.indexer = indexer;\n }\n\n /**\n * Provide the collator, decorator, or indexer to be tested.\n *\n * @deprecated Use `fromCollator`, `fromDecorator` or `fromIndexer` static\n * methods to create a test pipeline instead.\n */\n static withSubject(subject: Readable | Transform | Writable) {\n if (subject instanceof Transform) {\n return new TestPipeline({ decorator: subject });\n }\n\n if (subject instanceof Writable) {\n return new TestPipeline({ indexer: subject });\n }\n\n if (subject.readable || subject instanceof Readable) {\n return new TestPipeline({ collator: subject });\n }\n\n throw new Error(\n 'Unknown test subject: are you passing a readable, writable, or transform stream?',\n );\n }\n\n /**\n * Create a test pipeline given a collator you want to test.\n */\n static fromCollator(collator: Readable) {\n return new TestPipeline({ collator });\n }\n\n /**\n * Add a collator to the test 
pipeline.\n */\n withCollator(collator: Readable): this {\n this.collator = collator;\n return this;\n }\n\n /**\n * Create a test pipeline given a decorator you want to test.\n */\n static fromDecorator(decorator: Transform) {\n return new TestPipeline({ decorator });\n }\n\n /**\n * Add a decorator to the test pipeline.\n */\n withDecorator(decorator: Transform): this {\n this.decorator = decorator;\n return this;\n }\n\n /**\n * Create a test pipeline given an indexer you want to test.\n */\n static fromIndexer(indexer: Writable) {\n return new TestPipeline({ indexer });\n }\n\n /**\n * Add an indexer to the test pipeline.\n */\n withIndexer(indexer: Writable): this {\n this.indexer = indexer;\n return this;\n }\n\n /**\n * Provide documents for testing decorators and indexers.\n */\n withDocuments(documents: IndexableDocument[]): TestPipeline {\n if (this.collator) {\n throw new Error('Cannot provide documents when testing a collator.');\n }\n\n // Set a naive readable stream that just pushes all given documents.\n this.collator = new Readable({ objectMode: true });\n this.collator._read = () => {};\n process.nextTick(() => {\n documents.forEach(document => {\n this.collator!.push(document);\n });\n this.collator!.push(null);\n });\n\n return this;\n }\n\n /**\n * Execute the test pipeline so that you can make assertions about the result\n * or behavior of the given test subject.\n */\n async execute(): Promise<TestPipelineResult> {\n const documents: IndexableDocument[] = [];\n if (!this.collator) {\n throw new Error(\n 'Cannot execute pipeline without a collator or documents',\n );\n }\n\n // If we are here and there is no indexer, we are testing a collator or a\n // decorator. Set up a naive writable that captures documents in memory.\n if (!this.indexer) {\n this.indexer = new Writable({ objectMode: true });\n this.indexer._write = (document: IndexableDocument, _, done) => {\n documents.push(document);\n done();\n };\n }\n\n return new Promise<TestPipelineResult>(done => {\n const pipes: (Readable | Transform | Writable)[] = [this.collator!];\n if (this.decorator) {\n pipes.push(this.decorator);\n }\n pipes.push(this.indexer!);\n\n pipeline(pipes, (error: NodeJS.ErrnoException | null) => {\n done({\n error,\n documents,\n });\n });\n });\n 
}\n}\n"],"names":["Transform","Writable","Readable","pipeline"],"mappings":";;;;AAsEO,MAAM,YAAA,CAAa;AAAA,EAChB,QAAA;AAAA,EACA,SAAA;AAAA,EACA,OAAA;AAAA,EAEA,WAAA,CAAY;AAAA,IAClB,QAAA;AAAA,IACA,SAAA;AAAA,IACA;AAAA,GACF,EAIG;AACD,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,YAAY,OAAA,EAA0C;AAC3D,IAAA,IAAI,mBAAmBA,qBAAA,EAAW;AAChC,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,SAAA,EAAW,SAAS,CAAA;AAAA,IAChD;AAEA,IAAA,IAAI,mBAAmBC,oBAAA,EAAU;AAC/B,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,OAAA,EAAS,SAAS,CAAA;AAAA,IAC9C;AAEA,IAAA,IAAI,OAAA,CAAQ,QAAA,IAAY,OAAA,YAAmBC,oBAAA,EAAU;AACnD,MAAA,OAAO,IAAI,YAAA,CAAa,EAAE,QAAA,EAAU,SAAS,CAAA;AAAA,IAC/C;AAEA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,aAAa,QAAA,EAAoB;AACtC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,QAAA,EAAU,CAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,QAAA,EAA0B;AACrC,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAc,SAAA,EAAsB;AACzC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,SAAA,EAAW,CAAA;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,SAAA,EAA4B;AACxC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,OAAA,EAAmB;AACpC,IAAA,OAAO,IAAI,YAAA,CAAa,EAAE,OAAA,EAAS,CAAA;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,OAAA,EAAyB;AACnC,IAAA,IAAA,CAAK,OAAA,GAAU,OAAA;AACf,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,SAAA,EAA8C;AAC1D,IAAA,IAAI,KAAK,QAAA,EAAU;AACjB,MAAA,MAAM,IAAI,MAAM,mDAAmD,CAAA;AAAA,IACrE;AAGA,IAAA,IAAA,CAAK,WAAW,IAAIA,oBAAA,CAAS,EAAE,UAAA,EAAY,MAAM,CAAA;AACjD,IAAA,IAAA,CAAK,QAAA,CAAS,QAAQ,MAAM;AAAA,IAAC,CAAA;AAC7B,IAAA,OAAA,CAAQ,SAAS,MAAM;AACrB,MAAA,SAAA,CAAU,QAAQ,CAAA,QAAA,KAAY;AAC5B,QAAA,IAAA,CAAK,QAAA,CAAU,KAAK,QAAQ,CAAA;AAAA,MAC9B,CAAC,CAAA;AACD,MAAA,IAAA,CAAK,QAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IAC1B,CAAC,CAAA;AAED,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAA,GAAuC;AAC3C,IAAA,MAAM,YAAiC,EAAC;AACxC,IAAA,IAAI,CAAC,KAAK,QAAA,EAAU;AAClB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAIA,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,IAAA,CAAK,UAAU,IAAID,oBAAA,CAAS,EAAE,UAAA,EAAY,MAAM,CAAA;AAChD,MAAA,IAAA,CAAK,OAAA,CAAQ,MAAA,GAAS,CAAC,QAAA,EAA6B,GAAG,IAAA,KAAS;AAC9D,QAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AACvB,QAAA,IAAA,EAAK;AAAA,MACP,CAAA;AAAA,IACF;AAEA,IAAA,OAAO,IAAI,QAA4B,CAAA,IAAA,KAAQ;AAC7C,MAAA,MAAM,KAAA,GAA6C,CAAC,IAAA,CAAK,QAAS,CAAA;AAClE,MAAA,IAAI,KAAK,SAAA,EAAW;AAClB,QAAA,KAAA,CAAM,IAAA,CAAK,KAAK,SAAS,CAAA;AAAA,MAC3B;AACA,MAAA,KAAA,CAAM,IAAA,CAAK,KAAK,OAAQ,CAAA;AAExB,MAAAE,oBAAA,CAAS,KAAA,EAAO,CAAC,KAAA,KAAwC;AACvD,QAAA,IAAA,CAAK;AAAA,UACH,KAAA;AAAA,UACA;AAAA,SACD,CAAA;AAAA,MACH,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,EACH;AACF;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@backstage/plugin-search-backend-node",
- "version": "1.4.0",
+ "version": "1.4.1-next.0",
  "description": "A library for Backstage backend plugins that want to interact with the search backend plugin",
  "backstage": {
  "role": "node-library",
@@ -61,11 +61,11 @@
  "test": "backstage-cli package test"
  },
  "dependencies": {
- "@backstage/backend-plugin-api": "^1.6.0",
- "@backstage/config": "^1.3.6",
- "@backstage/errors": "^1.2.7",
- "@backstage/plugin-permission-common": "^0.9.3",
- "@backstage/plugin-search-common": "^1.2.21",
+ "@backstage/backend-plugin-api": "1.7.0-next.0",
+ "@backstage/config": "1.3.6",
+ "@backstage/errors": "1.2.7",
+ "@backstage/plugin-permission-common": "0.9.5-next.0",
+ "@backstage/plugin-search-common": "1.2.22-next.0",
  "@types/lunr": "^2.3.3",
  "lodash": "^4.17.21",
  "lunr": "^2.3.9",
@@ -73,9 +73,9 @@
  "uuid": "^11.0.0"
  },
  "devDependencies": {
- "@backstage/backend-defaults": "^0.14.0",
- "@backstage/backend-test-utils": "^1.10.2",
- "@backstage/cli": "^0.35.0",
+ "@backstage/backend-defaults": "0.15.1-next.0",
+ "@backstage/backend-test-utils": "1.10.4-next.0",
+ "@backstage/cli": "0.35.3-next.0",
  "@types/ndjson": "^2.0.1"
  }
  }
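The `package.json` changes above bump the package to `1.4.1-next.0` and pin the `@backstage/*` dependencies to exact `-next.0` prerelease versions in place of the previous caret ranges. The functional change they ship alongside is the `node:`-prefixed native imports seen in the `TestPipeline` diff; a consumer-side decorator written in the same style might look roughly like the sketch below, where the "drop empty documents" rule and the factory name are purely hypothetical.

```ts
// Sketch only: a trivial document decorator built on node:stream, mirroring
// the node:-prefixed imports this release adopts. The filtering rule and the
// factory name are hypothetical illustrations, not part of the package.
import { Transform } from 'node:stream';
import { IndexableDocument } from '@backstage/plugin-search-common';

export const createDropEmptyDocumentsDecorator = () =>
  new Transform({
    objectMode: true,
    transform(document: IndexableDocument, _encoding, done) {
      // Forward only documents that actually carry text.
      if (document.text.trim().length > 0) {
        this.push(document);
      }
      done();
    },
  });
```

Such a decorator could then be exercised the same way as in the JSDoc example above, e.g. `TestPipeline.fromDecorator(createDropEmptyDocumentsDecorator()).withDocuments([...]).execute()`.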