@backstage/plugin-search-backend-node 1.3.3-next.0 → 1.3.3-next.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,29 @@
1
1
  # @backstage/plugin-search-backend-node
2
2
 
3
+ ## 1.3.3-next.2
4
+
5
+ ### Patch Changes
6
+
7
+ - Updated dependencies
8
+ - @backstage/backend-defaults@0.5.1-next.2
9
+ - @backstage/backend-plugin-api@1.0.1-next.1
10
+ - @backstage/config@1.2.0
11
+ - @backstage/errors@1.2.4
12
+ - @backstage/plugin-permission-common@0.8.1
13
+ - @backstage/plugin-search-common@1.2.14
14
+
15
+ ## 1.3.3-next.1
16
+
17
+ ### Patch Changes
18
+
19
+ - Updated dependencies
20
+ - @backstage/backend-defaults@0.5.1-next.1
21
+ - @backstage/backend-plugin-api@1.0.1-next.0
22
+ - @backstage/config@1.2.0
23
+ - @backstage/errors@1.2.4
24
+ - @backstage/plugin-permission-common@0.8.1
25
+ - @backstage/plugin-search-common@1.2.14
26
+
3
27
  ## 1.3.3-next.0
4
28
 
5
29
  ### Patch Changes
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/plugin-search-backend-node__alpha",
3
- "version": "1.3.3-next.0",
3
+ "version": "1.3.3-next.2",
4
4
  "main": "../dist/alpha.cjs.js",
5
5
  "types": "../dist/alpha.d.ts"
6
6
  }
@@ -0,0 +1,123 @@
1
+ 'use strict';
2
+
3
+ var stream = require('stream');
4
+ var Scheduler = require('./Scheduler.cjs.js');
5
+
6
/**
 * Registers collators and decorators, then compiles them into scheduled
 * indexing tasks exposed through a Scheduler.
 */
class IndexBuilder {
  collators;
  decorators;
  documentTypes;
  searchEngine;
  logger;
  constructor(options) {
    this.collators = {};
    this.decorators = {};
    this.documentTypes = {};
    this.logger = options.logger;
    this.searchEngine = options.searchEngine;
  }
  /**
   * Returns the search engine this builder was configured with.
   */
  getSearchEngine() {
    return this.searchEngine;
  }
  /**
   * Returns document type metadata for every registered collator.
   */
  getDocumentTypes() {
    return this.documentTypes;
  }
  /**
   * Registers a collator factory whose collator will be executed on the
   * provided schedule.
   */
  addCollator(options) {
    const { factory, schedule } = options;
    this.logger.info(
      `Added ${factory.constructor.name} collator factory for type ${factory.type}`
    );
    this.collators[factory.type] = { factory, schedule };
    this.documentTypes[factory.type] = {
      visibilityPermission: factory.visibilityPermission
    };
  }
  /**
   * Registers a decorator factory. Decorators without an explicit type list
   * are filed under "*" and applied to documents of every type.
   */
  addDecorator(options) {
    const { factory } = options;
    const types = factory.types || ["*"];
    this.logger.info(
      `Added decorator ${factory.constructor.name} to types ${types.join(
        ", "
      )}`
    );
    for (const type of types) {
      if (Object.prototype.hasOwnProperty.call(this.decorators, type)) {
        this.decorators[type].push(factory);
      } else {
        this.decorators[type] = [factory];
      }
    }
  }
  /**
   * Compiles all registered collators and decorators into tasks on a new
   * Scheduler, which is returned to the caller.
   */
  async build() {
    const scheduler = new Scheduler.Scheduler({ logger: this.logger });
    for (const type of Object.keys(this.collators)) {
      const taskLogger = this.logger.child({ documentType: type });
      scheduler.addToSchedule({
        id: `search_index_${type.replace("-", "_").toLocaleLowerCase("en-US")}`,
        scheduledRunner: this.collators[type].schedule,
        task: async () => {
          // Instantiate the collator stream for this document type.
          const collator = await this.collators[type].factory.getCollator();
          taskLogger.info(
            `Collating documents for ${type} via ${this.collators[type].factory.constructor.name}`
          );
          // Instantiate every decorator that applies to this type:
          // wildcard ("*") decorators first, then type-specific ones.
          const applicable = (this.decorators["*"] || []).concat(
            this.decorators[type] || []
          );
          const decorators = await Promise.all(
            applicable.map(async (decoratorFactory) => {
              const decorator = await decoratorFactory.getDecorator();
              taskLogger.info(
                `Attached decorator via ${decoratorFactory.constructor.name} to ${type} index pipeline.`
              );
              return decorator;
            })
          );
          // Instantiate the indexer, then wire collator -> decorators ->
          // indexer into a single pipeline whose completion settles the task.
          const indexer = await this.searchEngine.getIndexer(type);
          return new Promise((resolve, reject) => {
            stream.pipeline([collator, ...decorators, indexer], (err) => {
              if (err) {
                taskLogger.error(
                  `Collating documents for ${type} failed: ${err}`
                );
                reject(err);
              } else {
                taskLogger.info(`Collating documents for ${type} succeeded`);
                resolve();
              }
            });
          });
        }
      });
    }
    return { scheduler };
  }
}
121
+
122
+ exports.IndexBuilder = IndexBuilder;
123
+ //# sourceMappingURL=IndexBuilder.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"IndexBuilder.cjs.js","sources":["../src/IndexBuilder.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n DocumentDecoratorFactory,\n DocumentTypeInfo,\n} from '@backstage/plugin-search-common';\nimport { pipeline, Transform } from 'stream';\nimport { Scheduler } from './Scheduler';\nimport {\n IndexBuilderOptions,\n RegisterCollatorParameters,\n RegisterDecoratorParameters,\n SearchEngine,\n} from './types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n/**\n * Used for adding collators, decorators and compile them into tasks which are added to a scheduler returned to the caller.\n * @public\n */\nexport class IndexBuilder {\n private collators: Record<string, RegisterCollatorParameters>;\n private decorators: Record<string, DocumentDecoratorFactory[]>;\n private documentTypes: Record<string, DocumentTypeInfo>;\n private searchEngine: SearchEngine;\n private logger: LoggerService;\n\n constructor(options: IndexBuilderOptions) {\n this.collators = {};\n this.decorators = {};\n this.documentTypes = {};\n this.logger = options.logger;\n this.searchEngine = options.searchEngine;\n }\n\n /**\n * Responsible for returning the registered search engine.\n */\n getSearchEngine(): SearchEngine {\n return this.searchEngine;\n }\n\n /**\n * Responsible for returning the registered document types.\n */\n getDocumentTypes(): Record<string, 
DocumentTypeInfo> {\n return this.documentTypes;\n }\n\n /**\n * Makes the index builder aware of a collator that should be executed at the\n * given refresh interval.\n */\n addCollator(options: RegisterCollatorParameters): void {\n const { factory, schedule } = options;\n\n this.logger.info(\n `Added ${factory.constructor.name} collator factory for type ${factory.type}`,\n );\n this.collators[factory.type] = {\n factory,\n schedule,\n };\n this.documentTypes[factory.type] = {\n visibilityPermission: factory.visibilityPermission,\n };\n }\n\n /**\n * Makes the index builder aware of a decorator. If no types are provided on\n * the decorator, it will be applied to documents from all known collators,\n * otherwise it will only be applied to documents of the given types.\n */\n addDecorator(options: RegisterDecoratorParameters): void {\n const { factory } = options;\n const types = factory.types || ['*'];\n this.logger.info(\n `Added decorator ${factory.constructor.name} to types ${types.join(\n ', ',\n )}`,\n );\n types.forEach(type => {\n if (this.decorators.hasOwnProperty(type)) {\n this.decorators[type].push(factory);\n } else {\n this.decorators[type] = [factory];\n }\n });\n }\n\n /**\n * Compiles collators and decorators into tasks, which are added to a\n * scheduler returned to the caller.\n */\n async build(): Promise<{ scheduler: Scheduler }> {\n const scheduler = new Scheduler({\n logger: this.logger,\n });\n\n Object.keys(this.collators).forEach(type => {\n const taskLogger = this.logger.child({ documentType: type });\n scheduler.addToSchedule({\n id: `search_index_${type.replace('-', '_').toLocaleLowerCase('en-US')}`,\n scheduledRunner: this.collators[type].schedule,\n task: async () => {\n // Instantiate the collator.\n const collator = await this.collators[type].factory.getCollator();\n taskLogger.info(\n `Collating documents for ${type} via ${this.collators[type].factory.constructor.name}`,\n );\n\n // Instantiate all relevant decorators.\n const 
decorators: Transform[] = await Promise.all(\n (this.decorators['*'] || [])\n .concat(this.decorators[type] || [])\n .map(async factory => {\n const decorator = await factory.getDecorator();\n taskLogger.info(\n `Attached decorator via ${factory.constructor.name} to ${type} index pipeline.`,\n );\n return decorator;\n }),\n );\n\n // Instantiate the indexer.\n const indexer = await this.searchEngine.getIndexer(type);\n\n // Compose collator/decorators/indexer into a pipeline\n return new Promise<void>((resolve, reject) => {\n pipeline(\n [collator, ...decorators, indexer],\n (error: NodeJS.ErrnoException | null) => {\n if (error) {\n taskLogger.error(\n `Collating documents for ${type} failed: ${error}`,\n );\n reject(error);\n } else {\n // Signal index pipeline completion!\n taskLogger.info(`Collating documents for ${type} succeeded`);\n resolve();\n }\n },\n );\n });\n },\n });\n });\n\n return {\n scheduler,\n };\n }\n}\n"],"names":["Scheduler","pipeline"],"mappings":";;;;;AAkCO,MAAM,YAAa,CAAA;AAAA,EAChB,SAAA,CAAA;AAAA,EACA,UAAA,CAAA;AAAA,EACA,aAAA,CAAA;AAAA,EACA,YAAA,CAAA;AAAA,EACA,MAAA,CAAA;AAAA,EAER,YAAY,OAA8B,EAAA;AACxC,IAAA,IAAA,CAAK,YAAY,EAAC,CAAA;AAClB,IAAA,IAAA,CAAK,aAAa,EAAC,CAAA;AACnB,IAAA,IAAA,CAAK,gBAAgB,EAAC,CAAA;AACtB,IAAA,IAAA,CAAK,SAAS,OAAQ,CAAA,MAAA,CAAA;AACtB,IAAA,IAAA,CAAK,eAAe,OAAQ,CAAA,YAAA,CAAA;AAAA,GAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAgC,GAAA;AAC9B,IAAA,OAAO,IAAK,CAAA,YAAA,CAAA;AAAA,GACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAqD,GAAA;AACnD,IAAA,OAAO,IAAK,CAAA,aAAA,CAAA;AAAA,GACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,OAA2C,EAAA;AACrD,IAAM,MAAA,EAAE,OAAS,EAAA,QAAA,EAAa,GAAA,OAAA,CAAA;AAE9B,IAAA,IAAA,CAAK,MAAO,CAAA,IAAA;AAAA,MACV,SAAS,OAAQ,CAAA,WAAA,CAAY,IAAI,CAAA,2BAAA,EAA8B,QAAQ,IAAI,CAAA,CAAA;AAAA,KAC7E,CAAA;AACA,IAAK,IAAA,CAAA,SAAA,CAAU,OAAQ,CAAA,IAAI,CAAI,GAAA;AAAA,MAC7B,OAAA;AAAA,MACA,QAAA;AAAA,KACF,CAAA;AACA,IAAK,IAAA,CAAA,aAAA,CAAc,OAAQ,CAAA,IAAI,CAAI,GAAA;AAAA,MACjC,sBAAsB,OAAQ,CAAA,oBAAA;AAAA,KAChC,CAAA;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,
aAAa,OAA4C,EAAA;AACvD,IAAM,MAAA,EAAE,SAAY,GAAA,OAAA,CAAA;AACpB,IAAA,MAAM,KAAQ,GAAA,OAAA,CAAQ,KAAS,IAAA,CAAC,GAAG,CAAA,CAAA;AACnC,IAAA,IAAA,CAAK,MAAO,CAAA,IAAA;AAAA,MACV,CAAmB,gBAAA,EAAA,OAAA,CAAQ,WAAY,CAAA,IAAI,aAAa,KAAM,CAAA,IAAA;AAAA,QAC5D,IAAA;AAAA,OACD,CAAA,CAAA;AAAA,KACH,CAAA;AACA,IAAA,KAAA,CAAM,QAAQ,CAAQ,IAAA,KAAA;AACpB,MAAA,IAAI,IAAK,CAAA,UAAA,CAAW,cAAe,CAAA,IAAI,CAAG,EAAA;AACxC,QAAA,IAAA,CAAK,UAAW,CAAA,IAAI,CAAE,CAAA,IAAA,CAAK,OAAO,CAAA,CAAA;AAAA,OAC7B,MAAA;AACL,QAAA,IAAA,CAAK,UAAW,CAAA,IAAI,CAAI,GAAA,CAAC,OAAO,CAAA,CAAA;AAAA,OAClC;AAAA,KACD,CAAA,CAAA;AAAA,GACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAA2C,GAAA;AAC/C,IAAM,MAAA,SAAA,GAAY,IAAIA,mBAAU,CAAA;AAAA,MAC9B,QAAQ,IAAK,CAAA,MAAA;AAAA,KACd,CAAA,CAAA;AAED,IAAA,MAAA,CAAO,IAAK,CAAA,IAAA,CAAK,SAAS,CAAA,CAAE,QAAQ,CAAQ,IAAA,KAAA;AAC1C,MAAA,MAAM,aAAa,IAAK,CAAA,MAAA,CAAO,MAAM,EAAE,YAAA,EAAc,MAAM,CAAA,CAAA;AAC3D,MAAA,SAAA,CAAU,aAAc,CAAA;AAAA,QACtB,EAAA,EAAI,gBAAgB,IAAK,CAAA,OAAA,CAAQ,KAAK,GAAG,CAAA,CAAE,iBAAkB,CAAA,OAAO,CAAC,CAAA,CAAA;AAAA,QACrE,eAAiB,EAAA,IAAA,CAAK,SAAU,CAAA,IAAI,CAAE,CAAA,QAAA;AAAA,QACtC,MAAM,YAAY;AAEhB,UAAA,MAAM,WAAW,MAAM,IAAA,CAAK,UAAU,IAAI,CAAA,CAAE,QAAQ,WAAY,EAAA,CAAA;AAChE,UAAW,UAAA,CAAA,IAAA;AAAA,YACT,CAAA,wBAAA,EAA2B,IAAI,CAAQ,KAAA,EAAA,IAAA,CAAK,UAAU,IAAI,CAAA,CAAE,OAAQ,CAAA,WAAA,CAAY,IAAI,CAAA,CAAA;AAAA,WACtF,CAAA;AAGA,UAAM,MAAA,UAAA,GAA0B,MAAM,OAAQ,CAAA,GAAA;AAAA,YAAA,CAC3C,KAAK,UAAW,CAAA,GAAG,CAAK,IAAA,IACtB,MAAO,CAAA,IAAA,CAAK,UAAW,CAAA,IAAI,KAAK,EAAE,CAClC,CAAA,GAAA,CAAI,OAAM,OAAW,KAAA;AACpB,cAAM,MAAA,SAAA,GAAY,MAAM,OAAA,CAAQ,YAAa,EAAA,CAAA;AAC7C,cAAW,UAAA,CAAA,IAAA;AAAA,gBACT,CAA0B,uBAAA,EAAA,OAAA,CAAQ,WAAY,CAAA,IAAI,OAAO,IAAI,CAAA,gBAAA,CAAA;AAAA,eAC/D,CAAA;AACA,cAAO,OAAA,SAAA,CAAA;AAAA,aACR,CAAA;AAAA,WACL,CAAA;AAGA,UAAA,MAAM,OAAU,GAAA,MAAM,IAAK,CAAA,YAAA,CAAa,WAAW,IAAI,CAAA,CAAA;AAGvD,UAAA,OAAO,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAW,KAAA;AAC5C,YAAAC,eAAA;AAAA,cACE,CAAC,QAAA,EAAU,GAAG,UAAA,EAAY,OAAO,CAAA;AAAA,cACjC,CAAC,KAAwC,KAAA;AACvC,gBAAA,IAAI,KAAO,EAAA;AACT,kBAAW,UAAA,CAAA,KAAA;AAAA,
oBACT,CAAA,wBAAA,EAA2B,IAAI,CAAA,SAAA,EAAY,KAAK,CAAA,CAAA;AAAA,mBAClD,CAAA;AACA,kBAAA,MAAA,CAAO,KAAK,CAAA,CAAA;AAAA,iBACP,MAAA;AAEL,kBAAW,UAAA,CAAA,IAAA,CAAK,CAA2B,wBAAA,EAAA,IAAI,CAAY,UAAA,CAAA,CAAA,CAAA;AAC3D,kBAAQ,OAAA,EAAA,CAAA;AAAA,iBACV;AAAA,eACF;AAAA,aACF,CAAA;AAAA,WACD,CAAA,CAAA;AAAA,SACH;AAAA,OACD,CAAA,CAAA;AAAA,KACF,CAAA,CAAA;AAED,IAAO,OAAA;AAAA,MACL,SAAA;AAAA,KACF,CAAA;AAAA,GACF;AACF;;;;"}
@@ -0,0 +1,62 @@
1
+ 'use strict';
2
+
3
/**
 * Scheduler responsible for all search tasks. Tasks are collected via
 * addToSchedule() and handed to their scheduled runners on start(); each
 * running task receives an AbortSignal that stop() triggers.
 */
class Scheduler {
  logger;
  schedule;
  abortControllers;
  isRunning;
  constructor(options) {
    this.logger = options.logger;
    this.schedule = {};
    this.abortControllers = [];
    this.isRunning = false;
  }
  /**
   * Adds each task and interval to the schedule.
   * When running the tasks, the scheduler waits at least for the time specified
   * in the interval once the task was completed, before running it again.
   */
  addToSchedule(options) {
    const { id, task, scheduledRunner } = options;
    // Registration is only permitted before start() has been called.
    if (this.isRunning) {
      throw new Error(
        "Cannot add task to schedule that has already been started."
      );
    }
    // Each task id must be unique within this scheduler.
    if (this.schedule[id]) {
      throw new Error(`Task with id ${id} already exists.`);
    }
    this.schedule[id] = { task, scheduledRunner };
  }
  /**
   * Starts the scheduling process for each task.
   */
  start() {
    this.logger.info("Starting all scheduled search tasks.");
    this.isRunning = true;
    for (const [id, envelope] of Object.entries(this.schedule)) {
      const controller = new AbortController();
      this.abortControllers.push(controller);
      envelope.scheduledRunner.run({
        id,
        fn: envelope.task,
        signal: controller.signal
      });
    }
  }
  /**
   * Stop all scheduled tasks by aborting their signals.
   */
  stop() {
    this.logger.info("Stopping all scheduled search tasks.");
    for (const controller of this.abortControllers) {
      controller.abort();
    }
    this.abortControllers = [];
    this.isRunning = false;
  }
}
60
+
61
+ exports.Scheduler = Scheduler;
62
+ //# sourceMappingURL=Scheduler.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"Scheduler.cjs.js","sources":["../src/Scheduler.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n SchedulerServiceTaskRunner,\n SchedulerServiceTaskFunction,\n} from '@backstage/backend-plugin-api';\n\ntype TaskEnvelope = {\n task: SchedulerServiceTaskFunction;\n scheduledRunner: SchedulerServiceTaskRunner;\n};\n\n/**\n * ScheduleTaskParameters\n * @public\n */\nexport type ScheduleTaskParameters = {\n id: string;\n task: SchedulerServiceTaskFunction;\n scheduledRunner: SchedulerServiceTaskRunner;\n};\n\n/**\n * Scheduler responsible for all search tasks.\n * @public\n */\nexport class Scheduler {\n private logger: LoggerService;\n private schedule: { [id: string]: TaskEnvelope };\n private abortControllers: AbortController[];\n private isRunning: boolean;\n\n constructor(options: { logger: LoggerService }) {\n this.logger = options.logger;\n this.schedule = {};\n this.abortControllers = [];\n this.isRunning = false;\n }\n\n /**\n * Adds each task and interval to the schedule.\n * When running the tasks, the scheduler waits at least for the time specified\n * in the interval once the task was completed, before running it again.\n */\n addToSchedule(options: ScheduleTaskParameters) {\n const { id, task, scheduledRunner } = options;\n\n if (this.isRunning) {\n throw new Error(\n 'Cannot add task to schedule that has already been 
started.',\n );\n }\n\n if (this.schedule[id]) {\n throw new Error(`Task with id ${id} already exists.`);\n }\n\n this.schedule[id] = { task, scheduledRunner };\n }\n\n /**\n * Starts the scheduling process for each task\n */\n start() {\n this.logger.info('Starting all scheduled search tasks.');\n this.isRunning = true;\n Object.keys(this.schedule).forEach(id => {\n const abortController = new AbortController();\n this.abortControllers.push(abortController);\n const { task, scheduledRunner } = this.schedule[id];\n scheduledRunner.run({\n id,\n fn: task,\n signal: abortController.signal,\n });\n });\n }\n\n /**\n * Stop all scheduled tasks.\n */\n stop() {\n this.logger.info('Stopping all scheduled search tasks.');\n for (const abortController of this.abortControllers) {\n abortController.abort();\n }\n this.abortControllers = [];\n this.isRunning = false;\n }\n}\n"],"names":[],"mappings":";;AAyCO,MAAM,SAAU,CAAA;AAAA,EACb,MAAA,CAAA;AAAA,EACA,QAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA,EACA,SAAA,CAAA;AAAA,EAER,YAAY,OAAoC,EAAA;AAC9C,IAAA,IAAA,CAAK,SAAS,OAAQ,CAAA,MAAA,CAAA;AACtB,IAAA,IAAA,CAAK,WAAW,EAAC,CAAA;AACjB,IAAA,IAAA,CAAK,mBAAmB,EAAC,CAAA;AACzB,IAAA,IAAA,CAAK,SAAY,GAAA,KAAA,CAAA;AAAA,GACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,OAAiC,EAAA;AAC7C,IAAA,MAAM,EAAE,EAAA,EAAI,IAAM,EAAA,eAAA,EAAoB,GAAA,OAAA,CAAA;AAEtC,IAAA,IAAI,KAAK,SAAW,EAAA;AAClB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,4DAAA;AAAA,OACF,CAAA;AAAA,KACF;AAEA,IAAI,IAAA,IAAA,CAAK,QAAS,CAAA,EAAE,CAAG,EAAA;AACrB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAgB,aAAA,EAAA,EAAE,CAAkB,gBAAA,CAAA,CAAA,CAAA;AAAA,KACtD;AAEA,IAAA,IAAA,CAAK,QAAS,CAAA,EAAE,CAAI,GAAA,EAAE,MAAM,eAAgB,EAAA,CAAA;AAAA,GAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,KAAQ,GAAA;AACN,IAAK,IAAA,CAAA,MAAA,CAAO,KAAK,sCAAsC,CAAA,CAAA;AACvD,IAAA,IAAA,CAAK,SAAY,GAAA,IAAA,CAAA;AACjB,IAAA,MAAA,CAAO,IAAK,CAAA,IAAA,CAAK,QAAQ,CAAA,CAAE,QAAQ,CAAM,EAAA,KAAA;AACvC,MAAM,MAAA,eAAA,GAAkB,IAAI,eAAgB,EAAA,CAAA;AAC5C,MAAK,IAAA,CAAA,gBAAA,CAAiB,KAAK,eAAe,CAAA,CAAA;AAC1C,MAAA,MAAM,EAAE,IAAM,EAAA,eAAA,EAAoB,GAAA,IAAA,
CAAK,SAAS,EAAE,CAAA,CAAA;AAClD,MAAA,eAAA,CAAgB,GAAI,CAAA;AAAA,QAClB,EAAA;AAAA,QACA,EAAI,EAAA,IAAA;AAAA,QACJ,QAAQ,eAAgB,CAAA,MAAA;AAAA,OACzB,CAAA,CAAA;AAAA,KACF,CAAA,CAAA;AAAA,GACH;AAAA;AAAA;AAAA;AAAA,EAKA,IAAO,GAAA;AACL,IAAK,IAAA,CAAA,MAAA,CAAO,KAAK,sCAAsC,CAAA,CAAA;AACvD,IAAW,KAAA,MAAA,eAAA,IAAmB,KAAK,gBAAkB,EAAA;AACnD,MAAA,eAAA,CAAgB,KAAM,EAAA,CAAA;AAAA,KACxB;AACA,IAAA,IAAA,CAAK,mBAAmB,EAAC,CAAA;AACzB,IAAA,IAAA,CAAK,SAAY,GAAA,KAAA,CAAA;AAAA,GACnB;AACF;;;;"}
@@ -0,0 +1,61 @@
1
+ 'use strict';
2
+
3
+ var ndjson = require('ndjson');
4
+
5
/**
 * Factory producing a collator that streams documents from the latest
 * newline-delimited JSON file matching a given search pattern. "Latest" is
 * the last matching file name in alphabetical order.
 */
class NewlineDelimitedJsonCollatorFactory {
  type;
  visibilityPermission;
  constructor(type, searchPattern, reader, logger, visibilityPermission) {
    this.type = type;
    this.searchPattern = searchPattern;
    this.reader = reader;
    this.logger = logger;
    this.visibilityPermission = visibilityPermission;
  }
  /**
   * Returns a NewlineDelimitedJsonCollatorFactory instance from configuration
   * and a set of options. The logger is scoped to the given document type.
   */
  static fromConfig(_config, options) {
    return new NewlineDelimitedJsonCollatorFactory(
      options.type,
      options.searchPattern,
      options.reader,
      options.logger.child({ documentType: options.type }),
      options.visibilityPermission
    );
  }
  /**
   * Returns the "latest" URL for the given search pattern (e.g. the one at the
   * end of the list, sorted alphabetically), or undefined when nothing matches.
   */
  async lastUrl() {
    try {
      this.logger.info(
        `Attempting to find latest .ndjson matching ${this.searchPattern}`
      );
      const searchResult = await this.reader.search(this.searchPattern);
      // Keep only .ndjson files, sorted ascending; the last one is "latest".
      const matching = searchResult.files
        .filter((file) => file.url.endsWith(".ndjson"))
        .sort((a, b) => a.url.localeCompare(b.url));
      return matching.at(-1)?.url;
    } catch (e) {
      this.logger.error(`Could not search for ${this.searchPattern}`, e);
      throw e;
    }
  }
  /**
   * Resolves the latest matching file and returns an object-mode stream of
   * its parsed documents. Throws when no matching .ndjson file exists.
   */
  async getCollator() {
    const latest = await this.lastUrl();
    if (!latest) {
      const noMatchingFile = `Could not find an .ndjson file matching ${this.searchPattern}`;
      this.logger.error(noMatchingFile);
      throw new Error(noMatchingFile);
    }
    this.logger.info(`Using latest .ndjson file ${latest}`);
    const readerResponse = await this.reader.readUrl(latest);
    // ndjson's parser turns the raw byte stream into an object-mode stream.
    return readerResponse.stream().pipe(ndjson.parse());
  }
}
59
+
60
+ exports.NewlineDelimitedJsonCollatorFactory = NewlineDelimitedJsonCollatorFactory;
61
+ //# sourceMappingURL=NewlineDelimitedJsonCollatorFactory.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"NewlineDelimitedJsonCollatorFactory.cjs.js","sources":["../../src/collators/NewlineDelimitedJsonCollatorFactory.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Config } from '@backstage/config';\nimport { DocumentCollatorFactory } from '@backstage/plugin-search-common';\nimport { Permission } from '@backstage/plugin-permission-common';\nimport { Readable } from 'stream';\nimport { parse as parseNdjson } from 'ndjson';\nimport { LoggerService, UrlReaderService } from '@backstage/backend-plugin-api';\n\n/**\n * Options for instantiate NewlineDelimitedJsonCollatorFactory\n * @public\n */\nexport type NewlineDelimitedJsonCollatorFactoryOptions = {\n type: string;\n searchPattern: string;\n reader: UrlReaderService;\n logger: LoggerService;\n visibilityPermission?: Permission;\n};\n\n/**\n * Factory class producing a collator that can be used to index documents\n * sourced from the latest newline delimited JSON file matching a given search\n * pattern. \"Latest\" is determined by the name of the file (last alphabetically\n * is considered latest).\n *\n * @remarks\n * The reader provided must implement the `search()` method as well as the\n * `readUrl` method whose response includes the `stream()` method. 
Naturally,\n * the reader must also be configured to understand the given search pattern.\n *\n * @example\n * Here's an example configuration using Google Cloud Storage, which would\n * return the latest file under the `bucket` GCS bucket with files like\n * `xyz-2021.ndjson` or `xyz-2022.ndjson`.\n * ```ts\n * indexBuilder.addCollator({\n * schedule,\n * factory: NewlineDelimitedJsonCollatorFactory.fromConfig(env.config, {\n * type: 'techdocs',\n * searchPattern: 'https://storage.cloud.google.com/bucket/xyz-*',\n * reader: env.reader,\n * logger: env.logger,\n * })\n * });\n * ```\n *\n * @public\n */\nexport class NewlineDelimitedJsonCollatorFactory\n implements DocumentCollatorFactory\n{\n readonly type: string;\n\n public readonly visibilityPermission: Permission | undefined;\n\n private constructor(\n type: string,\n private readonly searchPattern: string,\n private readonly reader: UrlReaderService,\n private readonly logger: LoggerService,\n visibilityPermission: Permission | undefined,\n ) {\n this.type = type;\n this.visibilityPermission = visibilityPermission;\n }\n\n /**\n * Returns a NewlineDelimitedJsonCollatorFactory instance from configuration\n * and a set of options.\n */\n static fromConfig(\n _config: Config,\n options: NewlineDelimitedJsonCollatorFactoryOptions,\n ): NewlineDelimitedJsonCollatorFactory {\n return new NewlineDelimitedJsonCollatorFactory(\n options.type,\n options.searchPattern,\n options.reader,\n options.logger.child({ documentType: options.type }),\n options.visibilityPermission,\n );\n }\n\n /**\n * Returns the \"latest\" URL for the given search pattern (e.g. the one at the\n * end of the list, sorted alphabetically).\n */\n private async lastUrl(): Promise<string | undefined> {\n try {\n // Search for files matching the given pattern, then sort/reverse. 
The\n // first item in the list will be the \"latest\" file.\n this.logger.info(\n `Attempting to find latest .ndjson matching ${this.searchPattern}`,\n );\n const { files } = await this.reader.search(this.searchPattern);\n const candidates = files\n .filter(file => file.url.endsWith('.ndjson'))\n .sort((a, b) => a.url.localeCompare(b.url))\n .reverse();\n\n return candidates[0]?.url;\n } catch (e) {\n this.logger.error(`Could not search for ${this.searchPattern}`, e);\n throw e;\n }\n }\n\n async getCollator(): Promise<Readable> {\n // Search for files matching the given pattern.\n const lastUrl = await this.lastUrl();\n\n // Abort if no such file could be found.\n if (!lastUrl) {\n const noMatchingFile = `Could not find an .ndjson file matching ${this.searchPattern}`;\n this.logger.error(noMatchingFile);\n throw new Error(noMatchingFile);\n } else {\n this.logger.info(`Using latest .ndjson file ${lastUrl}`);\n }\n\n // Use the UrlReader to try and stream the file.\n const readerResponse = await this.reader.readUrl!(lastUrl);\n const stream = readerResponse.stream!();\n\n // Use ndjson's parser to turn the raw file into an object-mode stream.\n return stream.pipe(parseNdjson());\n 
}\n}\n"],"names":["parseNdjson"],"mappings":";;;;AAgEO,MAAM,mCAEb,CAAA;AAAA,EAKU,WACN,CAAA,IAAA,EACiB,aACA,EAAA,MAAA,EACA,QACjB,oBACA,EAAA;AAJiB,IAAA,IAAA,CAAA,aAAA,GAAA,aAAA,CAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA,CAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA,CAAA;AAGjB,IAAA,IAAA,CAAK,IAAO,GAAA,IAAA,CAAA;AACZ,IAAA,IAAA,CAAK,oBAAuB,GAAA,oBAAA,CAAA;AAAA,GAC9B;AAAA,EAbS,IAAA,CAAA;AAAA,EAEO,oBAAA,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBhB,OAAO,UACL,CAAA,OAAA,EACA,OACqC,EAAA;AACrC,IAAA,OAAO,IAAI,mCAAA;AAAA,MACT,OAAQ,CAAA,IAAA;AAAA,MACR,OAAQ,CAAA,aAAA;AAAA,MACR,OAAQ,CAAA,MAAA;AAAA,MACR,QAAQ,MAAO,CAAA,KAAA,CAAM,EAAE,YAAc,EAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MACnD,OAAQ,CAAA,oBAAA;AAAA,KACV,CAAA;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,OAAuC,GAAA;AACnD,IAAI,IAAA;AAGF,MAAA,IAAA,CAAK,MAAO,CAAA,IAAA;AAAA,QACV,CAAA,2CAAA,EAA8C,KAAK,aAAa,CAAA,CAAA;AAAA,OAClE,CAAA;AACA,MAAM,MAAA,EAAE,OAAU,GAAA,MAAM,KAAK,MAAO,CAAA,MAAA,CAAO,KAAK,aAAa,CAAA,CAAA;AAC7D,MAAM,MAAA,UAAA,GAAa,MAChB,MAAO,CAAA,CAAA,IAAA,KAAQ,KAAK,GAAI,CAAA,QAAA,CAAS,SAAS,CAAC,CAAA,CAC3C,KAAK,CAAC,CAAA,EAAG,MAAM,CAAE,CAAA,GAAA,CAAI,cAAc,CAAE,CAAA,GAAG,CAAC,CAAA,CACzC,OAAQ,EAAA,CAAA;AAEX,MAAO,OAAA,UAAA,CAAW,CAAC,CAAG,EAAA,GAAA,CAAA;AAAA,aACf,CAAG,EAAA;AACV,MAAA,IAAA,CAAK,OAAO,KAAM,CAAA,CAAA,qBAAA,EAAwB,IAAK,CAAA,aAAa,IAAI,CAAC,CAAA,CAAA;AACjE,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA,EAEA,MAAM,WAAiC,GAAA;AAErC,IAAM,MAAA,OAAA,GAAU,MAAM,IAAA,CAAK,OAAQ,EAAA,CAAA;AAGnC,IAAA,IAAI,CAAC,OAAS,EAAA;AACZ,MAAM,MAAA,cAAA,GAAiB,CAA2C,wCAAA,EAAA,IAAA,CAAK,aAAa,CAAA,CAAA,CAAA;AACpF,MAAK,IAAA,CAAA,MAAA,CAAO,MAAM,cAAc,CAAA,CAAA;AAChC,MAAM,MAAA,IAAI,MAAM,cAAc,CAAA,CAAA;AAAA,KACzB,MAAA;AACL,MAAA,IAAA,CAAK,MAAO,CAAA,IAAA,CAAK,CAA6B,0BAAA,EAAA,OAAO,CAAE,CAAA,CAAA,CAAA;AAAA,KACzD;AAGA,IAAA,MAAM,cAAiB,GAAA,MAAM,IAAK,CAAA,MAAA,CAAO,QAAS,OAAO,CAAA,CAAA;AACzD,IAAM,MAAA,MAAA,GAAS,eAAe,MAAQ,EAAA,CAAA;AAGtC,IAAO,OAAA,MAAA,CAAO,IAAK,CAAAA,YAAA,EAAa,CAAA,CAAA;AAAA,GAClC;AACF;;;;"}
@@ -0,0 +1,213 @@
1
+ 'use strict';
2
+
3
+ var errors = require('../errors.cjs.js');
4
+ var lunr = require('lunr');
5
+ var uuid = require('uuid');
6
+ var LunrSearchEngineIndexer = require('./LunrSearchEngineIndexer.cjs.js');
7
+
8
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
9
+
10
+ var lunr__default = /*#__PURE__*/_interopDefaultCompat(lunr);
11
+
12
+ class LunrSearchEngine {
13
+ lunrIndices = {};
14
+ docStore;
15
+ logger;
16
+ highlightPreTag;
17
+ highlightPostTag;
18
+ constructor(options) {
19
+ this.logger = options.logger;
20
+ this.docStore = {};
21
+ const uuidTag = uuid.v4();
22
+ this.highlightPreTag = `<${uuidTag}>`;
23
+ this.highlightPostTag = `</${uuidTag}>`;
24
+ }
25
+ translator = ({
26
+ term,
27
+ filters,
28
+ types,
29
+ pageLimit
30
+ }) => {
31
+ const pageSize = pageLimit || 25;
32
+ return {
33
+ lunrQueryBuilder: (q) => {
34
+ const termToken = lunr__default.default.tokenizer(term);
35
+ q.term(termToken, {
36
+ usePipeline: true,
37
+ boost: 100
38
+ });
39
+ q.term(termToken, {
40
+ usePipeline: false,
41
+ boost: 10,
42
+ wildcard: lunr__default.default.Query.wildcard.TRAILING
43
+ });
44
+ q.term(termToken, {
45
+ usePipeline: false,
46
+ editDistance: 2,
47
+ boost: 1
48
+ });
49
+ if (filters) {
50
+ Object.entries(filters).forEach(([field, fieldValue]) => {
51
+ if (!q.allFields.includes(field)) {
52
+ throw new Error(`unrecognised field ${field}`);
53
+ }
54
+ const value = Array.isArray(fieldValue) && fieldValue.length === 1 ? fieldValue[0] : fieldValue;
55
+ if (["string", "number", "boolean"].includes(typeof value)) {
56
+ q.term(
57
+ lunr__default.default.tokenizer(value?.toString()).map(lunr__default.default.stopWordFilter).filter((element) => element !== void 0),
58
+ {
59
+ presence: lunr__default.default.Query.presence.REQUIRED,
60
+ fields: [field]
61
+ }
62
+ );
63
+ } else if (Array.isArray(value)) {
64
+ this.logger.warn(
65
+ `Non-scalar filter value used for field ${field}. Consider using a different Search Engine for better results.`
66
+ );
67
+ q.term(lunr__default.default.tokenizer(value), {
68
+ presence: lunr__default.default.Query.presence.OPTIONAL,
69
+ fields: [field]
70
+ });
71
+ } else {
72
+ this.logger.warn(`Unknown filter type used on field ${field}`);
73
+ }
74
+ });
75
+ }
76
+ },
77
+ documentTypes: types,
78
+ pageSize
79
+ };
80
+ };
81
+ setTranslator(translator) {
82
+ this.translator = translator;
83
+ }
84
+ async getIndexer(type) {
85
+ const indexer = new LunrSearchEngineIndexer.LunrSearchEngineIndexer();
86
+ const indexerLogger = this.logger.child({ documentType: type });
87
+ let errorThrown;
88
+ indexer.on("error", (err) => {
89
+ errorThrown = err;
90
+ });
91
+ indexer.on("close", () => {
92
+ const newDocuments = indexer.getDocumentStore();
93
+ const docStoreExists = this.lunrIndices[type] !== void 0;
94
+ const documentsIndexed = Object.keys(newDocuments).length;
95
+ if (!errorThrown && documentsIndexed > 0) {
96
+ this.lunrIndices[type] = indexer.buildIndex();
97
+ this.docStore = { ...this.docStore, ...newDocuments };
98
+ } else {
99
+ indexerLogger.warn(
100
+ `Index for ${type} was not ${docStoreExists ? "replaced" : "created"}: ${errorThrown ? "an error was encountered" : "indexer received 0 documents"}`
101
+ );
102
+ }
103
+ });
104
+ return indexer;
105
+ }
106
+ async query(query) {
107
+ const { lunrQueryBuilder, documentTypes, pageSize } = this.translator(
108
+ query
109
+ );
110
+ const results = [];
111
+ const indexKeys = Object.keys(this.lunrIndices).filter(
112
+ (type) => !documentTypes || documentTypes.includes(type)
113
+ );
114
+ if (documentTypes?.length && !indexKeys.length) {
115
+ throw new errors.MissingIndexError(
116
+ `Missing index for ${documentTypes?.toString()}. This could be because the index hasn't been created yet or there was a problem during index creation.`
117
+ );
118
+ }
119
+ indexKeys.forEach((type) => {
120
+ try {
121
+ results.push(
122
+ ...this.lunrIndices[type].query(lunrQueryBuilder).map((result) => {
123
+ return {
124
+ result,
125
+ type
126
+ };
127
+ })
128
+ );
129
+ } catch (err) {
130
+ if (err instanceof Error && err.message.startsWith("unrecognised field")) {
131
+ return;
132
+ }
133
+ throw err;
134
+ }
135
+ });
136
+ results.sort((doc1, doc2) => {
137
+ return doc2.result.score - doc1.result.score;
138
+ });
139
+ const { page } = decodePageCursor(query.pageCursor);
140
+ const offset = page * pageSize;
141
+ const hasPreviousPage = page > 0;
142
+ const hasNextPage = results.length > offset + pageSize;
143
+ const nextPageCursor = hasNextPage ? encodePageCursor({ page: page + 1 }) : void 0;
144
+ const previousPageCursor = hasPreviousPage ? encodePageCursor({ page: page - 1 }) : void 0;
145
+ const realResultSet = {
146
+ results: results.slice(offset, offset + pageSize).map((d, index) => ({
147
+ type: d.type,
148
+ document: this.docStore[d.result.ref],
149
+ rank: page * pageSize + index + 1,
150
+ highlight: {
151
+ preTag: this.highlightPreTag,
152
+ postTag: this.highlightPostTag,
153
+ fields: parseHighlightFields({
154
+ preTag: this.highlightPreTag,
155
+ postTag: this.highlightPostTag,
156
+ doc: this.docStore[d.result.ref],
157
+ positionMetadata: d.result.matchData.metadata
158
+ })
159
+ }
160
+ })),
161
+ numberOfResults: results.length,
162
+ nextPageCursor,
163
+ previousPageCursor
164
+ };
165
+ return realResultSet;
166
+ }
167
+ }
168
/**
 * Decodes an opaque base64 page cursor into a zero-based page number.
 * A missing or empty cursor means the first page.
 */
function decodePageCursor(pageCursor) {
  if (pageCursor) {
    const decoded = Buffer.from(pageCursor, "base64").toString("utf-8");
    return { page: Number(decoded) };
  }
  return { page: 0 };
}
176
/**
 * Encodes a zero-based page number as an opaque base64 cursor,
 * the inverse of decodePageCursor.
 */
function encodePageCursor({ page }) {
  const plain = String(page);
  return Buffer.from(plain, "utf-8").toString("base64");
}
179
/**
 * Builds a map of field name -> field content with every matched term
 * wrapped in the given pre/post highlight tags.
 *
 * positionMetadata is Lunr match metadata of the shape
 * { [term]: { [field]: { position: [start, length][] } } }.
 */
function parseHighlightFields({
  preTag,
  postTag,
  doc,
  positionMetadata
}) {
  // Collect the match positions per field across every query term.
  const positionsByField = {};
  for (const termMetadata of Object.values(positionMetadata)) {
    for (const [fieldKey, fieldMetadata] of Object.entries(termMetadata)) {
      const spans = fieldMetadata?.position?.filter((position) => Array.isArray(position));
      if (spans.length) {
        positionsByField[fieldKey] = positionsByField[fieldKey] ?? [];
        positionsByField[fieldKey].push(...spans);
      }
    }
  }

  const highlighted = {};
  for (const [field, spans] of Object.entries(positionsByField)) {
    // Insert tags from the end of the string backwards so earlier
    // offsets remain valid as the content grows.
    spans.sort((a, b) => b[0] - a[0]);
    let content = doc[field] ?? "";
    for (const span of spans) {
      const text = String(content);
      const start = span[0];
      const end = span[0] + span[1];
      content = `${text.substring(0, start)}${preTag}${text.substring(start, end)}${postTag}${text.substring(end)}`;
    }
    highlighted[field] = content;
  }
  return highlighted;
}
208
+
209
+ exports.LunrSearchEngine = LunrSearchEngine;
210
+ exports.decodePageCursor = decodePageCursor;
211
+ exports.encodePageCursor = encodePageCursor;
212
+ exports.parseHighlightFields = parseHighlightFields;
213
+ //# sourceMappingURL=LunrSearchEngine.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LunrSearchEngine.cjs.js","sources":["../../src/engines/LunrSearchEngine.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n IndexableDocument,\n IndexableResultSet,\n SearchQuery,\n} from '@backstage/plugin-search-common';\nimport { QueryTranslator, SearchEngine } from '../types';\nimport { MissingIndexError } from '../errors';\nimport lunr from 'lunr';\nimport { v4 as uuid } from 'uuid';\nimport { LunrSearchEngineIndexer } from './LunrSearchEngineIndexer';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n/**\n * Type of translated query for the Lunr Search Engine.\n * @public\n */\nexport type ConcreteLunrQuery = {\n lunrQueryBuilder: lunr.Index.QueryBuilder;\n documentTypes?: string[];\n pageSize: number;\n};\n\ntype LunrResultEnvelope = {\n result: lunr.Index.Result;\n type: string;\n};\n\n/**\n * Translator responsible for translating search term and filters to a query that the Lunr Search Engine understands.\n * @public\n */\nexport type LunrQueryTranslator = (query: SearchQuery) => ConcreteLunrQuery;\n\n/**\n * Lunr specific search engine implementation.\n * @public\n */\nexport class LunrSearchEngine implements SearchEngine {\n protected lunrIndices: Record<string, lunr.Index> = {};\n protected docStore: Record<string, IndexableDocument>;\n protected logger: LoggerService;\n protected highlightPreTag: string;\n protected 
highlightPostTag: string;\n\n constructor(options: { logger: LoggerService }) {\n this.logger = options.logger;\n this.docStore = {};\n const uuidTag = uuid();\n this.highlightPreTag = `<${uuidTag}>`;\n this.highlightPostTag = `</${uuidTag}>`;\n }\n\n protected translator: QueryTranslator = ({\n term,\n filters,\n types,\n pageLimit,\n }: SearchQuery): ConcreteLunrQuery => {\n const pageSize = pageLimit || 25;\n\n return {\n lunrQueryBuilder: q => {\n const termToken = lunr.tokenizer(term);\n\n // Support for typeahead search is based on https://github.com/olivernn/lunr.js/issues/256#issuecomment-295407852\n // look for an exact match and apply a large positive boost\n q.term(termToken, {\n usePipeline: true,\n boost: 100,\n });\n // look for terms that match the beginning of this term and apply a\n // medium boost\n q.term(termToken, {\n usePipeline: false,\n boost: 10,\n wildcard: lunr.Query.wildcard.TRAILING,\n });\n // look for terms that match with an edit distance of 2 and apply a\n // small boost\n q.term(termToken, {\n usePipeline: false,\n editDistance: 2,\n boost: 1,\n });\n\n if (filters) {\n Object.entries(filters).forEach(([field, fieldValue]) => {\n if (!q.allFields.includes(field)) {\n // Throw for unknown field, as this will be a non match\n throw new Error(`unrecognised field ${field}`);\n }\n // Arrays are poorly supported, but we can make it better for single-item arrays,\n // which should be a common case\n const value =\n Array.isArray(fieldValue) && fieldValue.length === 1\n ? 
fieldValue[0]\n : fieldValue;\n\n // Require that the given field has the given value\n if (['string', 'number', 'boolean'].includes(typeof value)) {\n q.term(\n lunr\n .tokenizer(value?.toString())\n .map(lunr.stopWordFilter)\n .filter(element => element !== undefined),\n {\n presence: lunr.Query.presence.REQUIRED,\n fields: [field],\n },\n );\n } else if (Array.isArray(value)) {\n // Illustrate how multi-value filters could work.\n // But warn that Lurn supports this poorly.\n this.logger.warn(\n `Non-scalar filter value used for field ${field}. Consider using a different Search Engine for better results.`,\n );\n q.term(lunr.tokenizer(value), {\n presence: lunr.Query.presence.OPTIONAL,\n fields: [field],\n });\n } else {\n // Log a warning or something about unknown filter value\n this.logger.warn(`Unknown filter type used on field ${field}`);\n }\n });\n }\n },\n documentTypes: types,\n pageSize,\n };\n };\n\n setTranslator(translator: LunrQueryTranslator) {\n this.translator = translator;\n }\n\n async getIndexer(type: string) {\n const indexer = new LunrSearchEngineIndexer();\n const indexerLogger = this.logger.child({ documentType: type });\n let errorThrown: Error | undefined;\n\n indexer.on('error', err => {\n errorThrown = err;\n });\n\n indexer.on('close', () => {\n // Once the stream is closed, build the index and store the documents in\n // memory for later retrieval.\n const newDocuments = indexer.getDocumentStore();\n const docStoreExists = this.lunrIndices[type] !== undefined;\n const documentsIndexed = Object.keys(newDocuments).length;\n\n // Do not set the index if there was an error or if no documents were\n // indexed. 
This ensures search continues to work for an index, even in\n // case of transient issues in underlying collators.\n if (!errorThrown && documentsIndexed > 0) {\n this.lunrIndices[type] = indexer.buildIndex();\n this.docStore = { ...this.docStore, ...newDocuments };\n } else {\n indexerLogger.warn(\n `Index for ${type} was not ${\n docStoreExists ? 'replaced' : 'created'\n }: ${\n errorThrown\n ? 'an error was encountered'\n : 'indexer received 0 documents'\n }`,\n );\n }\n });\n\n return indexer;\n }\n\n async query(query: SearchQuery): Promise<IndexableResultSet> {\n const { lunrQueryBuilder, documentTypes, pageSize } = this.translator(\n query,\n ) as ConcreteLunrQuery;\n\n const results: LunrResultEnvelope[] = [];\n\n const indexKeys = Object.keys(this.lunrIndices).filter(\n type => !documentTypes || documentTypes.includes(type),\n );\n\n if (documentTypes?.length && !indexKeys.length) {\n throw new MissingIndexError(\n `Missing index for ${documentTypes?.toString()}. This could be because the index hasn't been created yet or there was a problem during index creation.`,\n );\n }\n\n // Iterate over the filtered list of this.lunrIndex keys.\n indexKeys.forEach(type => {\n try {\n results.push(\n ...this.lunrIndices[type].query(lunrQueryBuilder).map(result => {\n return {\n result: result,\n type: type,\n };\n }),\n );\n } catch (err) {\n // if a field does not exist on a index, we can see that as a no-match\n if (\n err instanceof Error &&\n err.message.startsWith('unrecognised field')\n ) {\n return;\n }\n throw err;\n }\n });\n\n // Sort results.\n results.sort((doc1, doc2) => {\n return doc2.result.score - doc1.result.score;\n });\n\n // Perform paging\n const { page } = decodePageCursor(query.pageCursor);\n const offset = page * pageSize;\n const hasPreviousPage = page > 0;\n const hasNextPage = results.length > offset + pageSize;\n const nextPageCursor = hasNextPage\n ? 
encodePageCursor({ page: page + 1 })\n : undefined;\n const previousPageCursor = hasPreviousPage\n ? encodePageCursor({ page: page - 1 })\n : undefined;\n\n // Translate results into IndexableResultSet\n const realResultSet: IndexableResultSet = {\n results: results.slice(offset, offset + pageSize).map((d, index) => ({\n type: d.type,\n document: this.docStore[d.result.ref],\n rank: page * pageSize + index + 1,\n highlight: {\n preTag: this.highlightPreTag,\n postTag: this.highlightPostTag,\n fields: parseHighlightFields({\n preTag: this.highlightPreTag,\n postTag: this.highlightPostTag,\n doc: this.docStore[d.result.ref],\n positionMetadata: d.result.matchData.metadata as any,\n }),\n },\n })),\n numberOfResults: results.length,\n nextPageCursor,\n previousPageCursor,\n };\n\n return realResultSet;\n }\n}\n\nexport function decodePageCursor(pageCursor?: string): { page: number } {\n if (!pageCursor) {\n return { page: 0 };\n }\n\n return {\n page: Number(Buffer.from(pageCursor, 'base64').toString('utf-8')),\n };\n}\n\nexport function encodePageCursor({ page }: { page: number }): string {\n return Buffer.from(`${page}`, 'utf-8').toString('base64');\n}\n\ntype ParseHighlightFieldsProps = {\n preTag: string;\n postTag: string;\n doc: any;\n positionMetadata: {\n [term: string]: {\n [field: string]: {\n position: number[][];\n };\n };\n };\n};\n\nexport function parseHighlightFields({\n preTag,\n postTag,\n doc,\n positionMetadata,\n}: ParseHighlightFieldsProps): { [field: string]: string } {\n // Merge the field positions across all query terms\n const highlightFieldPositions = Object.values(positionMetadata).reduce(\n (fieldPositions, metadata) => {\n Object.keys(metadata).map(fieldKey => {\n const validFieldMetadataPositions = metadata[\n fieldKey\n ]?.position?.filter(position => Array.isArray(position));\n if (validFieldMetadataPositions.length) {\n fieldPositions[fieldKey] = fieldPositions[fieldKey] ?? 
[];\n fieldPositions[fieldKey].push(...validFieldMetadataPositions);\n }\n });\n\n return fieldPositions;\n },\n {} as { [field: string]: number[][] },\n );\n\n return Object.fromEntries(\n Object.entries(highlightFieldPositions).map(([field, positions]) => {\n positions.sort((a, b) => b[0] - a[0]);\n\n const highlightedField = positions.reduce((content, pos) => {\n return (\n `${String(content).substring(0, pos[0])}${preTag}` +\n `${String(content).substring(pos[0], pos[0] + pos[1])}` +\n `${postTag}${String(content).substring(pos[0] + pos[1])}`\n );\n }, doc[field] ?? '');\n\n return [field, highlightedField];\n }),\n );\n}\n"],"names":["uuid","lunr","LunrSearchEngineIndexer","MissingIndexError"],"mappings":";;;;;;;;;;;AAqDO,MAAM,gBAAyC,CAAA;AAAA,EAC1C,cAA0C,EAAC,CAAA;AAAA,EAC3C,QAAA,CAAA;AAAA,EACA,MAAA,CAAA;AAAA,EACA,eAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA,EAEV,YAAY,OAAoC,EAAA;AAC9C,IAAA,IAAA,CAAK,SAAS,OAAQ,CAAA,MAAA,CAAA;AACtB,IAAA,IAAA,CAAK,WAAW,EAAC,CAAA;AACjB,IAAA,MAAM,UAAUA,OAAK,EAAA,CAAA;AACrB,IAAK,IAAA,CAAA,eAAA,GAAkB,IAAI,OAAO,CAAA,CAAA,CAAA,CAAA;AAClC,IAAK,IAAA,CAAA,gBAAA,GAAmB,KAAK,OAAO,CAAA,CAAA,CAAA,CAAA;AAAA,GACtC;AAAA,EAEU,aAA8B,CAAC;AAAA,IACvC,IAAA;AAAA,IACA,OAAA;AAAA,IACA,KAAA;AAAA,IACA,SAAA;AAAA,GACoC,KAAA;AACpC,IAAA,MAAM,WAAW,SAAa,IAAA,EAAA,CAAA;AAE9B,IAAO,OAAA;AAAA,MACL,kBAAkB,CAAK,CAAA,KAAA;AACrB,QAAM,MAAA,SAAA,GAAYC,qBAAK,CAAA,SAAA,CAAU,IAAI,CAAA,CAAA;AAIrC,QAAA,CAAA,CAAE,KAAK,SAAW,EAAA;AAAA,UAChB,WAAa,EAAA,IAAA;AAAA,UACb,KAAO,EAAA,GAAA;AAAA,SACR,CAAA,CAAA;AAGD,QAAA,CAAA,CAAE,KAAK,SAAW,EAAA;AAAA,UAChB,WAAa,EAAA,KAAA;AAAA,UACb,KAAO,EAAA,EAAA;AAAA,UACP,QAAA,EAAUA,qBAAK,CAAA,KAAA,CAAM,QAAS,CAAA,QAAA;AAAA,SAC/B,CAAA,CAAA;AAGD,QAAA,CAAA,CAAE,KAAK,SAAW,EAAA;AAAA,UAChB,WAAa,EAAA,KAAA;AAAA,UACb,YAAc,EAAA,CAAA;AAAA,UACd,KAAO,EAAA,CAAA;AAAA,SACR,CAAA,CAAA;AAED,QAAA,IAAI,OAAS,EAAA;AACX,UAAO,MAAA,CAAA,OAAA,CAAQ,OAAO,CAAE,CAAA,OAAA,CAAQ,CAAC,CAAC,KAAA,EAAO,UAAU,CAAM,KAAA;AACvD,YAAA,IAAI,CAAC,CAAA,CAAE,SAAU,CAAA,QAAA,CAAS,KAAK,CAAG,EAAA;AAEhC,cAAA,MAAM,IAAI,KA
AA,CAAM,CAAsB,mBAAA,EAAA,KAAK,CAAE,CAAA,CAAA,CAAA;AAAA,aAC/C;AAGA,YAAM,MAAA,KAAA,GACJ,KAAM,CAAA,OAAA,CAAQ,UAAU,CAAA,IAAK,WAAW,MAAW,KAAA,CAAA,GAC/C,UAAW,CAAA,CAAC,CACZ,GAAA,UAAA,CAAA;AAGN,YAAI,IAAA,CAAC,UAAU,QAAU,EAAA,SAAS,EAAE,QAAS,CAAA,OAAO,KAAK,CAAG,EAAA;AAC1D,cAAE,CAAA,CAAA,IAAA;AAAA,gBACAA,qBACG,CAAA,SAAA,CAAU,KAAO,EAAA,QAAA,EAAU,CAAA,CAC3B,GAAI,CAAAA,qBAAA,CAAK,cAAc,CAAA,CACvB,MAAO,CAAA,CAAA,OAAA,KAAW,YAAY,KAAS,CAAA,CAAA;AAAA,gBAC1C;AAAA,kBACE,QAAA,EAAUA,qBAAK,CAAA,KAAA,CAAM,QAAS,CAAA,QAAA;AAAA,kBAC9B,MAAA,EAAQ,CAAC,KAAK,CAAA;AAAA,iBAChB;AAAA,eACF,CAAA;AAAA,aACS,MAAA,IAAA,KAAA,CAAM,OAAQ,CAAA,KAAK,CAAG,EAAA;AAG/B,cAAA,IAAA,CAAK,MAAO,CAAA,IAAA;AAAA,gBACV,0CAA0C,KAAK,CAAA,8DAAA,CAAA;AAAA,eACjD,CAAA;AACA,cAAA,CAAA,CAAE,IAAK,CAAAA,qBAAA,CAAK,SAAU,CAAA,KAAK,CAAG,EAAA;AAAA,gBAC5B,QAAA,EAAUA,qBAAK,CAAA,KAAA,CAAM,QAAS,CAAA,QAAA;AAAA,gBAC9B,MAAA,EAAQ,CAAC,KAAK,CAAA;AAAA,eACf,CAAA,CAAA;AAAA,aACI,MAAA;AAEL,cAAA,IAAA,CAAK,MAAO,CAAA,IAAA,CAAK,CAAqC,kCAAA,EAAA,KAAK,CAAE,CAAA,CAAA,CAAA;AAAA,aAC/D;AAAA,WACD,CAAA,CAAA;AAAA,SACH;AAAA,OACF;AAAA,MACA,aAAe,EAAA,KAAA;AAAA,MACf,QAAA;AAAA,KACF,CAAA;AAAA,GACF,CAAA;AAAA,EAEA,cAAc,UAAiC,EAAA;AAC7C,IAAA,IAAA,CAAK,UAAa,GAAA,UAAA,CAAA;AAAA,GACpB;AAAA,EAEA,MAAM,WAAW,IAAc,EAAA;AAC7B,IAAM,MAAA,OAAA,GAAU,IAAIC,+CAAwB,EAAA,CAAA;AAC5C,IAAA,MAAM,gBAAgB,IAAK,CAAA,MAAA,CAAO,MAAM,EAAE,YAAA,EAAc,MAAM,CAAA,CAAA;AAC9D,IAAI,IAAA,WAAA,CAAA;AAEJ,IAAQ,OAAA,CAAA,EAAA,CAAG,SAAS,CAAO,GAAA,KAAA;AACzB,MAAc,WAAA,GAAA,GAAA,CAAA;AAAA,KACf,CAAA,CAAA;AAED,IAAQ,OAAA,CAAA,EAAA,CAAG,SAAS,MAAM;AAGxB,MAAM,MAAA,YAAA,GAAe,QAAQ,gBAAiB,EAAA,CAAA;AAC9C,MAAA,MAAM,cAAiB,GAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAAM,KAAA,KAAA,CAAA,CAAA;AAClD,MAAA,MAAM,gBAAmB,GAAA,MAAA,CAAO,IAAK,CAAA,YAAY,CAAE,CAAA,MAAA,CAAA;AAKnD,MAAI,IAAA,CAAC,WAAe,IAAA,gBAAA,GAAmB,CAAG,EAAA;AACxC,QAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAAI,GAAA,OAAA,CAAQ,UAAW,EAAA,CAAA;AAC5C,QAAA,IAAA,CAAK,WAAW,EAAE,GAAG,IAAK,CAAA,QAAA,EAAU,GAAG,YAAa,EAAA,CAAA;AAAA,OAC/C,MAAA;AACL,QAAc,aAAA,CAAA,IAAA;AAAA,UACZ,CAAA,UAAA,EAAa,IAAI,CACf,SAAA,EAAA,cAA
A,GAAiB,aAAa,SAChC,CAAA,EAAA,EACE,WACI,GAAA,0BAAA,GACA,8BACN,CAAA,CAAA;AAAA,SACF,CAAA;AAAA,OACF;AAAA,KACD,CAAA,CAAA;AAED,IAAO,OAAA,OAAA,CAAA;AAAA,GACT;AAAA,EAEA,MAAM,MAAM,KAAiD,EAAA;AAC3D,IAAA,MAAM,EAAE,gBAAA,EAAkB,aAAe,EAAA,QAAA,KAAa,IAAK,CAAA,UAAA;AAAA,MACzD,KAAA;AAAA,KACF,CAAA;AAEA,IAAA,MAAM,UAAgC,EAAC,CAAA;AAEvC,IAAA,MAAM,SAAY,GAAA,MAAA,CAAO,IAAK,CAAA,IAAA,CAAK,WAAW,CAAE,CAAA,MAAA;AAAA,MAC9C,CAAQ,IAAA,KAAA,CAAC,aAAiB,IAAA,aAAA,CAAc,SAAS,IAAI,CAAA;AAAA,KACvD,CAAA;AAEA,IAAA,IAAI,aAAe,EAAA,MAAA,IAAU,CAAC,SAAA,CAAU,MAAQ,EAAA;AAC9C,MAAA,MAAM,IAAIC,wBAAA;AAAA,QACR,CAAA,kBAAA,EAAqB,aAAe,EAAA,QAAA,EAAU,CAAA,uGAAA,CAAA;AAAA,OAChD,CAAA;AAAA,KACF;AAGA,IAAA,SAAA,CAAU,QAAQ,CAAQ,IAAA,KAAA;AACxB,MAAI,IAAA;AACF,QAAQ,OAAA,CAAA,IAAA;AAAA,UACN,GAAG,KAAK,WAAY,CAAA,IAAI,EAAE,KAAM,CAAA,gBAAgB,CAAE,CAAA,GAAA,CAAI,CAAU,MAAA,KAAA;AAC9D,YAAO,OAAA;AAAA,cACL,MAAA;AAAA,cACA,IAAA;AAAA,aACF,CAAA;AAAA,WACD,CAAA;AAAA,SACH,CAAA;AAAA,eACO,GAAK,EAAA;AAEZ,QAAA,IACE,eAAe,KACf,IAAA,GAAA,CAAI,OAAQ,CAAA,UAAA,CAAW,oBAAoB,CAC3C,EAAA;AACA,UAAA,OAAA;AAAA,SACF;AACA,QAAM,MAAA,GAAA,CAAA;AAAA,OACR;AAAA,KACD,CAAA,CAAA;AAGD,IAAQ,OAAA,CAAA,IAAA,CAAK,CAAC,IAAA,EAAM,IAAS,KAAA;AAC3B,MAAA,OAAO,IAAK,CAAA,MAAA,CAAO,KAAQ,GAAA,IAAA,CAAK,MAAO,CAAA,KAAA,CAAA;AAAA,KACxC,CAAA,CAAA;AAGD,IAAA,MAAM,EAAE,IAAA,EAAS,GAAA,gBAAA,CAAiB,MAAM,UAAU,CAAA,CAAA;AAClD,IAAA,MAAM,SAAS,IAAO,GAAA,QAAA,CAAA;AACtB,IAAA,MAAM,kBAAkB,IAAO,GAAA,CAAA,CAAA;AAC/B,IAAM,MAAA,WAAA,GAAc,OAAQ,CAAA,MAAA,GAAS,MAAS,GAAA,QAAA,CAAA;AAC9C,IAAM,MAAA,cAAA,GAAiB,cACnB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AACJ,IAAM,MAAA,kBAAA,GAAqB,kBACvB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AAGJ,IAAA,MAAM,aAAoC,GAAA;AAAA,MACxC,OAAA,EAAS,OAAQ,CAAA,KAAA,CAAM,MAAQ,EAAA,MAAA,GAAS,QAAQ,CAAE,CAAA,GAAA,CAAI,CAAC,CAAA,EAAG,KAAW,MAAA;AAAA,QACnE,MAAM,CAAE,CAAA,IAAA;AAAA,QACR,QAAU,EAAA,IAAA,CAAK,QAAS,CAAA,CAAA,CAAE,OAAO,GAAG,CAAA;AAAA,QACpC,IAAA,EAAM,IAAO,GAAA,QAAA,GAAW,KAAQ,GAAA,CAAA;AAAA,QAChC,SAAW,EAAA;AAAA,UACT,QAAQ,IAAK,CAAA,eAA
A;AAAA,UACb,SAAS,IAAK,CAAA,gBAAA;AAAA,UACd,QAAQ,oBAAqB,CAAA;AAAA,YAC3B,QAAQ,IAAK,CAAA,eAAA;AAAA,YACb,SAAS,IAAK,CAAA,gBAAA;AAAA,YACd,GAAK,EAAA,IAAA,CAAK,QAAS,CAAA,CAAA,CAAE,OAAO,GAAG,CAAA;AAAA,YAC/B,gBAAA,EAAkB,CAAE,CAAA,MAAA,CAAO,SAAU,CAAA,QAAA;AAAA,WACtC,CAAA;AAAA,SACH;AAAA,OACA,CAAA,CAAA;AAAA,MACF,iBAAiB,OAAQ,CAAA,MAAA;AAAA,MACzB,cAAA;AAAA,MACA,kBAAA;AAAA,KACF,CAAA;AAEA,IAAO,OAAA,aAAA,CAAA;AAAA,GACT;AACF,CAAA;AAEO,SAAS,iBAAiB,UAAuC,EAAA;AACtE,EAAA,IAAI,CAAC,UAAY,EAAA;AACf,IAAO,OAAA,EAAE,MAAM,CAAE,EAAA,CAAA;AAAA,GACnB;AAEA,EAAO,OAAA;AAAA,IACL,IAAA,EAAM,OAAO,MAAO,CAAA,IAAA,CAAK,YAAY,QAAQ,CAAA,CAAE,QAAS,CAAA,OAAO,CAAC,CAAA;AAAA,GAClE,CAAA;AACF,CAAA;AAEgB,SAAA,gBAAA,CAAiB,EAAE,IAAA,EAAkC,EAAA;AACnE,EAAO,OAAA,MAAA,CAAO,KAAK,CAAG,EAAA,IAAI,IAAI,OAAO,CAAA,CAAE,SAAS,QAAQ,CAAA,CAAA;AAC1D,CAAA;AAeO,SAAS,oBAAqB,CAAA;AAAA,EACnC,MAAA;AAAA,EACA,OAAA;AAAA,EACA,GAAA;AAAA,EACA,gBAAA;AACF,CAA2D,EAAA;AAEzD,EAAA,MAAM,uBAA0B,GAAA,MAAA,CAAO,MAAO,CAAA,gBAAgB,CAAE,CAAA,MAAA;AAAA,IAC9D,CAAC,gBAAgB,QAAa,KAAA;AAC5B,MAAA,MAAA,CAAO,IAAK,CAAA,QAAQ,CAAE,CAAA,GAAA,CAAI,CAAY,QAAA,KAAA;AACpC,QAAM,MAAA,2BAAA,GAA8B,QAClC,CAAA,QACF,CAAG,EAAA,QAAA,EAAU,OAAO,CAAY,QAAA,KAAA,KAAA,CAAM,OAAQ,CAAA,QAAQ,CAAC,CAAA,CAAA;AACvD,QAAA,IAAI,4BAA4B,MAAQ,EAAA;AACtC,UAAA,cAAA,CAAe,QAAQ,CAAA,GAAI,cAAe,CAAA,QAAQ,KAAK,EAAC,CAAA;AACxD,UAAA,cAAA,CAAe,QAAQ,CAAA,CAAE,IAAK,CAAA,GAAG,2BAA2B,CAAA,CAAA;AAAA,SAC9D;AAAA,OACD,CAAA,CAAA;AAED,MAAO,OAAA,cAAA,CAAA;AAAA,KACT;AAAA,IACA,EAAC;AAAA,GACH,CAAA;AAEA,EAAA,OAAO,MAAO,CAAA,WAAA;AAAA,IACZ,MAAA,CAAO,QAAQ,uBAAuB,CAAA,CAAE,IAAI,CAAC,CAAC,KAAO,EAAA,SAAS,CAAM,KAAA;AAClE,MAAU,SAAA,CAAA,IAAA,CAAK,CAAC,CAAG,EAAA,CAAA,KAAM,EAAE,CAAC,CAAA,GAAI,CAAE,CAAA,CAAC,CAAC,CAAA,CAAA;AAEpC,MAAA,MAAM,gBAAmB,GAAA,SAAA,CAAU,MAAO,CAAA,CAAC,SAAS,GAAQ,KAAA;AAC1D,QAAA,OACE,GAAG,MAAO,CAAA,OAAO,EAAE,SAAU,CAAA,CAAA,EAAG,IAAI,CAAC,CAAC,CAAC,CAAA,EAAG,MAAM,CAC7C,EAAA,MAAA,CAAO,OAAO,CAAE,CAAA,SAAA,CAAU,IAAI,CAAC,CAAA,EAAG,GAAI,CAAA,CAAC,IAAI,GAAI,CAAA,CAAC,CAAC,CAAC,CAAA,EAClD,OAAO,CAAG,EAAA,MAAA,CAAO,OAAO,CAAA,CAA
E,UAAU,GAAI,CAAA,CAAC,IAAI,GAAI,CAAA,CAAC,CAAC,CAAC,CAAA,CAAA,CAAA;AAAA,OAExD,EAAA,GAAA,CAAI,KAAK,CAAA,IAAK,EAAE,CAAA,CAAA;AAEnB,MAAO,OAAA,CAAC,OAAO,gBAAgB,CAAA,CAAA;AAAA,KAChC,CAAA;AAAA,GACH,CAAA;AACF;;;;;;;"}
@@ -0,0 +1,50 @@
1
+ 'use strict';
2
+
3
+ var lunr = require('lunr');
4
+ var BatchSearchEngineIndexer = require('../indexing/BatchSearchEngineIndexer.cjs.js');
5
+ require('@backstage/errors');
6
+ require('stream');
7
+
8
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
9
+
10
+ var lunr__default = /*#__PURE__*/_interopDefaultCompat(lunr);
11
+
12
class LunrSearchEngineIndexer extends BatchSearchEngineIndexer.BatchSearchEngineIndexer {
  // Becomes true once the first batch has defined the index schema.
  schemaInitialized = false;
  builder;
  // Raw documents kept so refs can be resolved back at query time.
  docStore = {};

  constructor() {
    super({ batchSize: 1e3 });
    const engine = lunr__default.default;
    this.builder = new engine.Builder();
    this.builder.pipeline.add(engine.trimmer, engine.stopWordFilter, engine.stemmer);
    this.builder.searchPipeline.add(engine.stemmer);
    // Keep term positions so results can be highlighted later.
    this.builder.metadataWhitelist = ["position"];
  }

  // No async setup or teardown is needed for an in-memory index.
  async initialize() {
  }
  async finalize() {
  }

  async index(documents) {
    if (this.schemaInitialized === false) {
      // Derive the index schema from the first document's keys,
      // using "location" as the reference field.
      for (const field of Object.keys(documents[0])) {
        this.builder.field(field);
      }
      this.builder.ref("location");
      this.schemaInitialized = true;
    }
    for (const document of documents) {
      this.builder.add(document);
      this.docStore[document.location] = document;
    }
  }

  buildIndex() {
    return this.builder.build();
  }

  getDocumentStore() {
    return this.docStore;
  }
}
48
+
49
+ exports.LunrSearchEngineIndexer = LunrSearchEngineIndexer;
50
+ //# sourceMappingURL=LunrSearchEngineIndexer.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LunrSearchEngineIndexer.cjs.js","sources":["../../src/engines/LunrSearchEngineIndexer.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport lunr from 'lunr';\nimport { BatchSearchEngineIndexer } from '../indexing';\n\n/**\n * Lunr specific search engine indexer\n * @public\n */\nexport class LunrSearchEngineIndexer extends BatchSearchEngineIndexer {\n private schemaInitialized = false;\n private builder: lunr.Builder;\n private docStore: Record<string, IndexableDocument> = {};\n\n constructor() {\n super({ batchSize: 1000 });\n\n this.builder = new lunr.Builder();\n this.builder.pipeline.add(lunr.trimmer, lunr.stopWordFilter, lunr.stemmer);\n this.builder.searchPipeline.add(lunr.stemmer);\n this.builder.metadataWhitelist = ['position'];\n }\n\n // No async initialization required.\n async initialize(): Promise<void> {}\n async finalize(): Promise<void> {}\n\n async index(documents: IndexableDocument[]): Promise<void> {\n if (!this.schemaInitialized) {\n // Make this lunr index aware of all relevant fields.\n Object.keys(documents[0]).forEach(field => {\n this.builder.field(field);\n });\n\n // Set \"location\" field as reference field\n this.builder.ref('location');\n\n this.schemaInitialized = true;\n }\n\n documents.forEach(document => {\n // Add document to Lunar index\n 
this.builder.add(document);\n\n // Store documents in memory to be able to look up document using the ref during query time\n // This is not how you should implement your SearchEngine implementation! Do not copy!\n this.docStore[document.location] = document;\n });\n }\n\n buildIndex() {\n return this.builder.build();\n }\n\n getDocumentStore() {\n return this.docStore;\n }\n}\n"],"names":["BatchSearchEngineIndexer","lunr"],"mappings":";;;;;;;;;;;AAwBO,MAAM,gCAAgCA,iDAAyB,CAAA;AAAA,EAC5D,iBAAoB,GAAA,KAAA,CAAA;AAAA,EACpB,OAAA,CAAA;AAAA,EACA,WAA8C,EAAC,CAAA;AAAA,EAEvD,WAAc,GAAA;AACZ,IAAM,KAAA,CAAA,EAAE,SAAW,EAAA,GAAA,EAAM,CAAA,CAAA;AAEzB,IAAK,IAAA,CAAA,OAAA,GAAU,IAAIC,qBAAA,CAAK,OAAQ,EAAA,CAAA;AAChC,IAAK,IAAA,CAAA,OAAA,CAAQ,SAAS,GAAI,CAAAA,qBAAA,CAAK,SAASA,qBAAK,CAAA,cAAA,EAAgBA,sBAAK,OAAO,CAAA,CAAA;AACzE,IAAA,IAAA,CAAK,OAAQ,CAAA,cAAA,CAAe,GAAI,CAAAA,qBAAA,CAAK,OAAO,CAAA,CAAA;AAC5C,IAAK,IAAA,CAAA,OAAA,CAAQ,iBAAoB,GAAA,CAAC,UAAU,CAAA,CAAA;AAAA,GAC9C;AAAA;AAAA,EAGA,MAAM,UAA4B,GAAA;AAAA,GAAC;AAAA,EACnC,MAAM,QAA0B,GAAA;AAAA,GAAC;AAAA,EAEjC,MAAM,MAAM,SAA+C,EAAA;AACzD,IAAI,IAAA,CAAC,KAAK,iBAAmB,EAAA;AAE3B,MAAA,MAAA,CAAO,KAAK,SAAU,CAAA,CAAC,CAAC,CAAA,CAAE,QAAQ,CAAS,KAAA,KAAA;AACzC,QAAK,IAAA,CAAA,OAAA,CAAQ,MAAM,KAAK,CAAA,CAAA;AAAA,OACzB,CAAA,CAAA;AAGD,MAAK,IAAA,CAAA,OAAA,CAAQ,IAAI,UAAU,CAAA,CAAA;AAE3B,MAAA,IAAA,CAAK,iBAAoB,GAAA,IAAA,CAAA;AAAA,KAC3B;AAEA,IAAA,SAAA,CAAU,QAAQ,CAAY,QAAA,KAAA;AAE5B,MAAK,IAAA,CAAA,OAAA,CAAQ,IAAI,QAAQ,CAAA,CAAA;AAIzB,MAAK,IAAA,CAAA,QAAA,CAAS,QAAS,CAAA,QAAQ,CAAI,GAAA,QAAA,CAAA;AAAA,KACpC,CAAA,CAAA;AAAA,GACH;AAAA,EAEA,UAAa,GAAA;AACX,IAAO,OAAA,IAAA,CAAK,QAAQ,KAAM,EAAA,CAAA;AAAA,GAC5B;AAAA,EAEA,gBAAmB,GAAA;AACjB,IAAA,OAAO,IAAK,CAAA,QAAA,CAAA;AAAA,GACd;AACF;;;;"}