@backstage/plugin-search-backend-module-pg 0.5.36-next.0 → 0.5.36-next.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
1
1
  # @backstage/plugin-search-backend-module-pg
2
2
 
3
+ ## 0.5.36-next.2
4
+
5
+ ### Patch Changes
6
+
7
+ - Updated dependencies
8
+ - @backstage/backend-plugin-api@1.0.1-next.1
9
+ - @backstage/config@1.2.0
10
+ - @backstage/plugin-search-backend-node@1.3.3-next.2
11
+ - @backstage/plugin-search-common@1.2.14
12
+
13
+ ## 0.5.36-next.1
14
+
15
+ ### Patch Changes
16
+
17
+ - Updated dependencies
18
+ - @backstage/backend-plugin-api@1.0.1-next.0
19
+ - @backstage/config@1.2.0
20
+ - @backstage/plugin-search-backend-node@1.3.3-next.1
21
+ - @backstage/plugin-search-common@1.2.14
22
+
3
23
  ## 0.5.36-next.0
4
24
 
5
25
  ### Patch Changes
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/plugin-search-backend-module-pg__alpha",
3
- "version": "0.5.36-next.0",
3
+ "version": "0.5.36-next.2",
4
4
  "main": "../dist/alpha.cjs.js",
5
5
  "types": "../dist/alpha.d.ts"
6
6
  }
@@ -0,0 +1,131 @@
1
+ 'use strict';
2
+
3
+ var PgSearchEngineIndexer = require('./PgSearchEngineIndexer.cjs.js');
4
+ var DatabaseDocumentStore = require('../database/DatabaseDocumentStore.cjs.js');
5
+ var uuid = require('uuid');
6
+
7
/**
 * Postgres-backed implementation of the Backstage SearchEngine contract.
 *
 * Translates search queries into Postgres full-text queries and executes
 * them through a DatabaseDocumentStore. Highlight snippets are delimited
 * with per-instance UUID tags so delimiters cannot collide with document
 * text.
 */
class PgSearchEngine {
  /**
   * @deprecated This will be marked as private in a future release, please use fromConfig instead
   */
  constructor(databaseStore, config, logger) {
    this.databaseStore = databaseStore;
    // Unique per-instance tag: highlight delimiters can never appear in
    // real document content.
    const uuidTag = uuid.v4();
    const highlightConfig = config.getOptionalConfig(
      "search.pg.highlightOptions"
    );
    // Defaults mirror ts_headline's own defaults where applicable.
    const highlightOptions = {
      preTag: `<${uuidTag}>`,
      postTag: `</${uuidTag}>`,
      useHighlight: highlightConfig?.getOptionalBoolean("useHighlight") ?? true,
      maxWords: highlightConfig?.getOptionalNumber("maxWords") ?? 35,
      minWords: highlightConfig?.getOptionalNumber("minWords") ?? 15,
      shortWord: highlightConfig?.getOptionalNumber("shortWord") ?? 3,
      highlightAll: highlightConfig?.getOptionalBoolean("highlightAll") ?? false,
      maxFragments: highlightConfig?.getOptionalNumber("maxFragments") ?? 0,
      fragmentDelimiter: highlightConfig?.getOptionalString("fragmentDelimiter") ?? " ... "
    };
    this.highlightOptions = highlightOptions;
    this.indexerBatchSize = config.getOptionalNumber("search.pg.indexerBatchSize") ?? 1e3;
    this.logger = logger;
  }
  logger;
  highlightOptions;
  indexerBatchSize;
  /**
   * @deprecated This will be removed in a future release, please use fromConfig instead
   */
  static async from(options) {
    return new PgSearchEngine(
      await DatabaseDocumentStore.DatabaseDocumentStore.create(options.database),
      options.config,
      options.logger
    );
  }
  /**
   * Creates a PgSearchEngine from root config plus a database service and
   * optional logger. Preferred factory.
   */
  static async fromConfig(config, options) {
    return new PgSearchEngine(
      await DatabaseDocumentStore.DatabaseDocumentStore.create(options.database),
      config,
      options.logger
    );
  }
  /**
   * Returns whether the given database service is backed by a Postgres
   * version this engine supports.
   */
  static async supported(database) {
    return await DatabaseDocumentStore.DatabaseDocumentStore.supported(await database.getClient());
  }
  /**
   * Translates a generic search query into the concrete Postgres query.
   * Sanitizes tsquery special characters from each term and matches both
   * the exact lexeme and its prefix form.
   */
  translator(query, options) {
    const pageSize = query.pageLimit || 25;
    const { page } = decodePageCursor(query.pageCursor);
    const offset = page * pageSize;
    // Request one extra row so we can tell whether a next page exists.
    const limit = pageSize + 1;
    return {
      pgQuery: {
        pgTerm: query.term.split(/\s/).map((p) => p.replace(/[\0()|&:*!]/g, "").trim()).filter((p) => p !== "").map((p) => `(${JSON.stringify(p)} | ${JSON.stringify(p)}:*)`).join("&"),
        fields: query.filters,
        types: query.types,
        offset,
        limit,
        options: options.highlightOptions
      },
      pageSize
    };
  }
  /** Replaces the default query translator with a custom one. */
  setTranslator(translator) {
    this.translator = translator;
  }
  /** Creates a batch indexer bound to this engine's store and batch size. */
  async getIndexer(type) {
    return new PgSearchEngineIndexer.PgSearchEngineIndexer({
      batchSize: this.indexerBatchSize,
      type,
      databaseStore: this.databaseStore,
      logger: this.logger?.child({ documentType: type })
    });
  }
  /**
   * Executes a search query and returns a page of results together with
   * next/previous page cursors where applicable.
   */
  async query(query) {
    const { pgQuery, pageSize } = this.translator(query, {
      highlightOptions: this.highlightOptions
    });
    const rows = await this.databaseStore.transaction(
      async (tx) => this.databaseStore.query(tx, pgQuery)
    );
    // One extra row was requested; its presence signals another page.
    const { page } = decodePageCursor(query.pageCursor);
    const hasNextPage = rows.length > pageSize;
    const hasPreviousPage = page > 0;
    const pageRows = rows.slice(0, pageSize);
    const nextPageCursor = hasNextPage ? encodePageCursor({ page: page + 1 }) : void 0;
    const previousPageCursor = hasPreviousPage ? encodePageCursor({ page: page - 1 }) : void 0;
    const results = pageRows.map(
      ({ type, document, highlight }, index) => ({
        type,
        document,
        // Rank is absolute across pages, 1-based.
        rank: page * pageSize + index + 1,
        highlight: {
          preTag: pgQuery.options.preTag,
          postTag: pgQuery.options.postTag,
          fields: highlight ? {
            text: highlight.text,
            title: highlight.title,
            location: highlight.location,
            path: ""
          } : {}
        }
      })
    );
    return { results, nextPageCursor, previousPageCursor };
  }
}
116
/**
 * Decodes a base64-encoded page cursor into a zero-based page number.
 * A missing or empty cursor designates the first page.
 */
function decodePageCursor(pageCursor) {
  if (!pageCursor) return { page: 0 };
  const decoded = Buffer.from(pageCursor, "base64").toString("utf-8");
  return { page: Number(decoded) };
}
124
/** Encodes a zero-based page number as a base64 cursor string. */
function encodePageCursor({ page }) {
  const text = `${page}`;
  return Buffer.from(text, "utf-8").toString("base64");
}
127
+
128
+ exports.PgSearchEngine = PgSearchEngine;
129
+ exports.decodePageCursor = decodePageCursor;
130
+ exports.encodePageCursor = encodePageCursor;
131
+ //# sourceMappingURL=PgSearchEngine.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"PgSearchEngine.cjs.js","sources":["../../src/PgSearchEngine/PgSearchEngine.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SearchEngine } from '@backstage/plugin-search-backend-node';\nimport {\n SearchQuery,\n IndexableResultSet,\n IndexableResult,\n} from '@backstage/plugin-search-common';\nimport { PgSearchEngineIndexer } from './PgSearchEngineIndexer';\nimport {\n DatabaseDocumentStore,\n DatabaseStore,\n PgSearchQuery,\n} from '../database';\nimport { v4 as uuid } from 'uuid';\nimport { Config } from '@backstage/config';\nimport { DatabaseService, LoggerService } from '@backstage/backend-plugin-api';\n\n/**\n * Search query that the Postgres search engine understands.\n * @public\n */\nexport type ConcretePgSearchQuery = {\n pgQuery: PgSearchQuery;\n pageSize: number;\n};\n\n/**\n * Options available for the Postgres specific query translator.\n * @public\n */\nexport type PgSearchQueryTranslatorOptions = {\n highlightOptions: PgSearchHighlightOptions;\n};\n\n/**\n * Postgres specific query translator.\n * @public\n */\nexport type PgSearchQueryTranslator = (\n query: SearchQuery,\n options: PgSearchQueryTranslatorOptions,\n) => ConcretePgSearchQuery;\n\n/**\n * Options to instantiate PgSearchEngine\n * @public\n */\nexport type PgSearchOptions = {\n database: DatabaseService;\n logger?: LoggerService;\n};\n\n/**\n * Options for highlighting 
search terms\n * @public\n */\nexport type PgSearchHighlightOptions = {\n useHighlight?: boolean;\n maxWords?: number;\n minWords?: number;\n shortWord?: number;\n highlightAll?: boolean;\n maxFragments?: number;\n fragmentDelimiter?: string;\n preTag: string;\n postTag: string;\n};\n\n/** @public */\nexport class PgSearchEngine implements SearchEngine {\n private readonly logger?: LoggerService;\n private readonly highlightOptions: PgSearchHighlightOptions;\n private readonly indexerBatchSize: number;\n\n /**\n * @deprecated This will be marked as private in a future release, please us fromConfig instead\n */\n constructor(\n private readonly databaseStore: DatabaseStore,\n config: Config,\n logger?: LoggerService,\n ) {\n const uuidTag = uuid();\n const highlightConfig = config.getOptionalConfig(\n 'search.pg.highlightOptions',\n );\n\n const highlightOptions: PgSearchHighlightOptions = {\n preTag: `<${uuidTag}>`,\n postTag: `</${uuidTag}>`,\n useHighlight: highlightConfig?.getOptionalBoolean('useHighlight') ?? true,\n maxWords: highlightConfig?.getOptionalNumber('maxWords') ?? 35,\n minWords: highlightConfig?.getOptionalNumber('minWords') ?? 15,\n shortWord: highlightConfig?.getOptionalNumber('shortWord') ?? 3,\n highlightAll:\n highlightConfig?.getOptionalBoolean('highlightAll') ?? false,\n maxFragments: highlightConfig?.getOptionalNumber('maxFragments') ?? 0,\n fragmentDelimiter:\n highlightConfig?.getOptionalString('fragmentDelimiter') ?? ' ... ',\n };\n this.highlightOptions = highlightOptions;\n this.indexerBatchSize =\n config.getOptionalNumber('search.pg.indexerBatchSize') ?? 
1000;\n this.logger = logger;\n }\n\n /**\n * @deprecated This will be removed in a future release, please use fromConfig instead\n */\n static async from(options: {\n database: DatabaseService;\n config: Config;\n logger?: LoggerService;\n }): Promise<PgSearchEngine> {\n return new PgSearchEngine(\n await DatabaseDocumentStore.create(options.database),\n options.config,\n options.logger,\n );\n }\n\n static async fromConfig(config: Config, options: PgSearchOptions) {\n return new PgSearchEngine(\n await DatabaseDocumentStore.create(options.database),\n config,\n options.logger,\n );\n }\n\n static async supported(database: DatabaseService): Promise<boolean> {\n return await DatabaseDocumentStore.supported(await database.getClient());\n }\n\n translator(\n query: SearchQuery,\n options: PgSearchQueryTranslatorOptions,\n ): ConcretePgSearchQuery {\n const pageSize = query.pageLimit || 25;\n const { page } = decodePageCursor(query.pageCursor);\n const offset = page * pageSize;\n // We request more result to know whether there is another page\n const limit = pageSize + 1;\n\n return {\n pgQuery: {\n pgTerm: query.term\n .split(/\\s/)\n .map(p => p.replace(/[\\0()|&:*!]/g, '').trim())\n .filter(p => p !== '')\n .map(p => `(${JSON.stringify(p)} | ${JSON.stringify(p)}:*)`)\n .join('&'),\n fields: query.filters as Record<string, string | string[]>,\n types: query.types,\n offset,\n limit,\n options: options.highlightOptions,\n },\n pageSize,\n };\n }\n\n setTranslator(translator: PgSearchQueryTranslator) {\n this.translator = translator;\n }\n\n async getIndexer(type: string) {\n return new PgSearchEngineIndexer({\n batchSize: this.indexerBatchSize,\n type,\n databaseStore: this.databaseStore,\n logger: this.logger?.child({ documentType: type }),\n });\n }\n\n async query(query: SearchQuery): Promise<IndexableResultSet> {\n const { pgQuery, pageSize } = this.translator(query, {\n highlightOptions: this.highlightOptions,\n });\n\n const rows = await 
this.databaseStore.transaction(async tx =>\n this.databaseStore.query(tx, pgQuery),\n );\n\n // We requested one result more than the page size to know whether there is\n // another page.\n const { page } = decodePageCursor(query.pageCursor);\n const hasNextPage = rows.length > pageSize;\n const hasPreviousPage = page > 0;\n const pageRows = rows.slice(0, pageSize);\n const nextPageCursor = hasNextPage\n ? encodePageCursor({ page: page + 1 })\n : undefined;\n const previousPageCursor = hasPreviousPage\n ? encodePageCursor({ page: page - 1 })\n : undefined;\n\n const results = pageRows.map(\n ({ type, document, highlight }, index): IndexableResult => ({\n type,\n document,\n rank: page * pageSize + index + 1,\n highlight: {\n preTag: pgQuery.options.preTag,\n postTag: pgQuery.options.postTag,\n fields: highlight\n ? {\n text: highlight.text,\n title: highlight.title,\n location: highlight.location,\n path: '',\n }\n : {},\n },\n }),\n );\n\n return { results, nextPageCursor, previousPageCursor };\n }\n}\n\nexport function decodePageCursor(pageCursor?: string): { page: number } {\n if (!pageCursor) {\n return { page: 0 };\n }\n\n return {\n page: Number(Buffer.from(pageCursor, 'base64').toString('utf-8')),\n };\n}\n\nexport function encodePageCursor({ page }: { page: number }): string {\n return Buffer.from(`${page}`, 
'utf-8').toString('base64');\n}\n"],"names":["uuid","DatabaseDocumentStore","PgSearchEngineIndexer"],"mappings":";;;;;;AAoFO,MAAM,cAAuC,CAAA;AAAA;AAAA;AAAA;AAAA,EAQlD,WAAA,CACmB,aACjB,EAAA,MAAA,EACA,MACA,EAAA;AAHiB,IAAA,IAAA,CAAA,aAAA,GAAA,aAAA,CAAA;AAIjB,IAAA,MAAM,UAAUA,OAAK,EAAA,CAAA;AACrB,IAAA,MAAM,kBAAkB,MAAO,CAAA,iBAAA;AAAA,MAC7B,4BAAA;AAAA,KACF,CAAA;AAEA,IAAA,MAAM,gBAA6C,GAAA;AAAA,MACjD,MAAA,EAAQ,IAAI,OAAO,CAAA,CAAA,CAAA;AAAA,MACnB,OAAA,EAAS,KAAK,OAAO,CAAA,CAAA,CAAA;AAAA,MACrB,YAAc,EAAA,eAAA,EAAiB,kBAAmB,CAAA,cAAc,CAAK,IAAA,IAAA;AAAA,MACrE,QAAU,EAAA,eAAA,EAAiB,iBAAkB,CAAA,UAAU,CAAK,IAAA,EAAA;AAAA,MAC5D,QAAU,EAAA,eAAA,EAAiB,iBAAkB,CAAA,UAAU,CAAK,IAAA,EAAA;AAAA,MAC5D,SAAW,EAAA,eAAA,EAAiB,iBAAkB,CAAA,WAAW,CAAK,IAAA,CAAA;AAAA,MAC9D,YACE,EAAA,eAAA,EAAiB,kBAAmB,CAAA,cAAc,CAAK,IAAA,KAAA;AAAA,MACzD,YAAc,EAAA,eAAA,EAAiB,iBAAkB,CAAA,cAAc,CAAK,IAAA,CAAA;AAAA,MACpE,iBACE,EAAA,eAAA,EAAiB,iBAAkB,CAAA,mBAAmB,CAAK,IAAA,OAAA;AAAA,KAC/D,CAAA;AACA,IAAA,IAAA,CAAK,gBAAmB,GAAA,gBAAA,CAAA;AACxB,IAAA,IAAA,CAAK,gBACH,GAAA,MAAA,CAAO,iBAAkB,CAAA,4BAA4B,CAAK,IAAA,GAAA,CAAA;AAC5D,IAAA,IAAA,CAAK,MAAS,GAAA,MAAA,CAAA;AAAA,GAChB;AAAA,EAlCiB,MAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA;AAAA;AAAA;AAAA,EAqCjB,aAAa,KAAK,OAIU,EAAA;AAC1B,IAAA,OAAO,IAAI,cAAA;AAAA,MACT,MAAMC,2CAAA,CAAsB,MAAO,CAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACnD,OAAQ,CAAA,MAAA;AAAA,MACR,OAAQ,CAAA,MAAA;AAAA,KACV,CAAA;AAAA,GACF;AAAA,EAEA,aAAa,UAAW,CAAA,MAAA,EAAgB,OAA0B,EAAA;AAChE,IAAA,OAAO,IAAI,cAAA;AAAA,MACT,MAAMA,2CAAA,CAAsB,MAAO,CAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACnD,MAAA;AAAA,MACA,OAAQ,CAAA,MAAA;AAAA,KACV,CAAA;AAAA,GACF;AAAA,EAEA,aAAa,UAAU,QAA6C,EAAA;AAClE,IAAA,OAAO,MAAMA,2CAAsB,CAAA,SAAA,CAAU,MAAM,QAAA,CAAS,WAAW,CAAA,CAAA;AAAA,GACzE;AAAA,EAEA,UAAA,CACE,OACA,OACuB,EAAA;AACvB,IAAM,MAAA,QAAA,GAAW,MAAM,SAAa,IAAA,EAAA,CAAA;AACpC,IAAA,MAAM,EAAE,IAAA,EAAS,GAAA,gBAAA,CAAiB,MAAM,UAAU,CAAA,CAAA;AAClD,IAAA,MAAM,SAAS,IAAO,GAAA,QAAA,CAAA;AAEtB,IAAA,MAAM,QAAQ,QAAW,GAAA,CAAA,CAAA;AAEzB,IAAO,OAAA;AAAA,MACL,OAAS,EAAA;AAAA,QACP,QAAQ,KAAM,CAA
A,IAAA,CACX,KAAM,CAAA,IAAI,EACV,GAAI,CAAA,CAAA,CAAA,KAAK,CAAE,CAAA,OAAA,CAAQ,gBAAgB,EAAE,CAAA,CAAE,IAAK,EAAC,EAC7C,MAAO,CAAA,CAAA,CAAA,KAAK,CAAM,KAAA,EAAE,EACpB,GAAI,CAAA,CAAA,CAAA,KAAK,CAAI,CAAA,EAAA,IAAA,CAAK,UAAU,CAAC,CAAC,CAAM,GAAA,EAAA,IAAA,CAAK,UAAU,CAAC,CAAC,CAAK,GAAA,CAAA,CAAA,CAC1D,KAAK,GAAG,CAAA;AAAA,QACX,QAAQ,KAAM,CAAA,OAAA;AAAA,QACd,OAAO,KAAM,CAAA,KAAA;AAAA,QACb,MAAA;AAAA,QACA,KAAA;AAAA,QACA,SAAS,OAAQ,CAAA,gBAAA;AAAA,OACnB;AAAA,MACA,QAAA;AAAA,KACF,CAAA;AAAA,GACF;AAAA,EAEA,cAAc,UAAqC,EAAA;AACjD,IAAA,IAAA,CAAK,UAAa,GAAA,UAAA,CAAA;AAAA,GACpB;AAAA,EAEA,MAAM,WAAW,IAAc,EAAA;AAC7B,IAAA,OAAO,IAAIC,2CAAsB,CAAA;AAAA,MAC/B,WAAW,IAAK,CAAA,gBAAA;AAAA,MAChB,IAAA;AAAA,MACA,eAAe,IAAK,CAAA,aAAA;AAAA,MACpB,QAAQ,IAAK,CAAA,MAAA,EAAQ,MAAM,EAAE,YAAA,EAAc,MAAM,CAAA;AAAA,KAClD,CAAA,CAAA;AAAA,GACH;AAAA,EAEA,MAAM,MAAM,KAAiD,EAAA;AAC3D,IAAA,MAAM,EAAE,OAAS,EAAA,QAAA,EAAa,GAAA,IAAA,CAAK,WAAW,KAAO,EAAA;AAAA,MACnD,kBAAkB,IAAK,CAAA,gBAAA;AAAA,KACxB,CAAA,CAAA;AAED,IAAM,MAAA,IAAA,GAAO,MAAM,IAAA,CAAK,aAAc,CAAA,WAAA;AAAA,MAAY,OAAM,EACtD,KAAA,IAAA,CAAK,aAAc,CAAA,KAAA,CAAM,IAAI,OAAO,CAAA;AAAA,KACtC,CAAA;AAIA,IAAA,MAAM,EAAE,IAAA,EAAS,GAAA,gBAAA,CAAiB,MAAM,UAAU,CAAA,CAAA;AAClD,IAAM,MAAA,WAAA,GAAc,KAAK,MAAS,GAAA,QAAA,CAAA;AAClC,IAAA,MAAM,kBAAkB,IAAO,GAAA,CAAA,CAAA;AAC/B,IAAA,MAAM,QAAW,GAAA,IAAA,CAAK,KAAM,CAAA,CAAA,EAAG,QAAQ,CAAA,CAAA;AACvC,IAAM,MAAA,cAAA,GAAiB,cACnB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AACJ,IAAM,MAAA,kBAAA,GAAqB,kBACvB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AAEJ,IAAA,MAAM,UAAU,QAAS,CAAA,GAAA;AAAA,MACvB,CAAC,EAAE,IAAA,EAAM,QAAU,EAAA,SAAA,IAAa,KAA4B,MAAA;AAAA,QAC1D,IAAA;AAAA,QACA,QAAA;AAAA,QACA,IAAA,EAAM,IAAO,GAAA,QAAA,GAAW,KAAQ,GAAA,CAAA;AAAA,QAChC,SAAW,EAAA;AAAA,UACT,MAAA,EAAQ,QAAQ,OAAQ,CAAA,MAAA;AAAA,UACxB,OAAA,EAAS,QAAQ,OAAQ,CAAA,OAAA;AAAA,UACzB,QAAQ,SACJ,GAAA;AAAA,YACE,MAAM,SAAU,CAAA,IAAA;AAAA,YAChB,OAAO,SAAU,CAAA,KAAA;AAAA,YACjB,UAAU,SAAU,CAAA,QAAA;AAAA,YACpB,IAAM,EAAA,EAAA;AAAA,cAER,EAAC;AAAA,SACP;AAAA,OACF,CAAA;A
AAA,KACF,CAAA;AAEA,IAAO,OAAA,EAAE,OAAS,EAAA,cAAA,EAAgB,kBAAmB,EAAA,CAAA;AAAA,GACvD;AACF,CAAA;AAEO,SAAS,iBAAiB,UAAuC,EAAA;AACtE,EAAA,IAAI,CAAC,UAAY,EAAA;AACf,IAAO,OAAA,EAAE,MAAM,CAAE,EAAA,CAAA;AAAA,GACnB;AAEA,EAAO,OAAA;AAAA,IACL,IAAA,EAAM,OAAO,MAAO,CAAA,IAAA,CAAK,YAAY,QAAQ,CAAA,CAAE,QAAS,CAAA,OAAO,CAAC,CAAA;AAAA,GAClE,CAAA;AACF,CAAA;AAEgB,SAAA,gBAAA,CAAiB,EAAE,IAAA,EAAkC,EAAA;AACnE,EAAO,OAAA,MAAA,CAAO,KAAK,CAAG,EAAA,IAAI,IAAI,OAAO,CAAA,CAAE,SAAS,QAAQ,CAAA,CAAA;AAC1D;;;;;;"}
@@ -0,0 +1,81 @@
1
+ 'use strict';
2
+
3
+ var pluginSearchBackendNode = require('@backstage/plugin-search-backend-node');
4
+
5
/**
 * Batch indexer that streams documents into Postgres inside a single
 * transaction: prepare on initialize, insert per batch, commit on finalize.
 *
 * Fix: the knex transaction's commit/rollback return promises; the previous
 * code did not await them, so a failed commit resolved finalize successfully
 * and surfaced only as an unhandled rejection. All commit/rollback calls are
 * now awaited.
 */
class PgSearchEngineIndexer extends pluginSearchBackendNode.BatchSearchEngineIndexer {
  logger;
  store;
  type;
  tx;
  numRecords = 0;
  /**
   * @param options - batchSize, document type, database store, optional logger.
   */
  constructor(options) {
    super({ batchSize: options.batchSize });
    this.store = options.databaseStore;
    this.type = options.type;
    this.logger = options.logger;
  }
  /**
   * Opens the indexing transaction and prepares the insert staging table.
   * Rolls back on failure so the PG connection is not leaked.
   */
  async initialize() {
    this.tx = await this.store.getTransaction();
    try {
      await this.store.prepareInsert(this.tx);
    } catch (e) {
      // Release the transaction before rethrowing so the stream can be
      // closed and destroyed properly without a dangling connection.
      await this.tx.rollback(e);
      throw e;
    }
  }
  /** Inserts one batch of documents into the staging table. */
  async index(documents) {
    this.numRecords += documents.length;
    const refs = [...new Set(documents.map((d) => d.authorization?.resourceRef))];
    this.logger?.debug(
      `Attempting to index the following entities: ${refs.toString()}`
    );
    try {
      await this.store.insertDocuments(this.tx, this.type, documents);
    } catch (e) {
      await this.tx.rollback(e);
      throw e;
    }
  }
  /**
   * Completes the insert and commits. Collators that produced zero
   * documents roll back instead, so an empty run never wipes the index.
   */
  async finalize() {
    if (this.numRecords === 0) {
      this.logger?.warn(
        `Index for ${this.type} was not replaced: indexer received 0 documents`
      );
      await this.tx.rollback();
      return;
    }
    try {
      await this.store.completeInsert(this.tx, this.type);
      // Awaiting the commit ensures a commit failure propagates through
      // finalize rather than becoming an unhandled rejection.
      await this.tx.commit();
    } catch (e) {
      await this.tx.rollback(e);
      throw e;
    }
  }
  /**
   * Custom handler covering the case where an error occurred somewhere else in
   * the indexing pipeline (e.g. a collator or decorator). In such cases, the
   * finalize method is not called, which leaves a dangling transaction and
   * therefore an open connection to PG. This handler ensures we close the
   * transaction and associated connection.
   *
   * todo(@backstage/search-maintainers): Consider introducing a more
   * formal mechanism for handling such errors in BatchSearchEngineIndexer and
   * replacing this method with it. See: #17291
   *
   * @internal
   */
  async _destroy(error, done) {
    if (!error) {
      done();
      return;
    }
    if (!this.tx.isCompleted()) {
      await this.tx.rollback(error);
    }
    done(error);
  }
}
79
+
80
+ exports.PgSearchEngineIndexer = PgSearchEngineIndexer;
81
+ //# sourceMappingURL=PgSearchEngineIndexer.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"PgSearchEngineIndexer.cjs.js","sources":["../../src/PgSearchEngine/PgSearchEngineIndexer.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { BatchSearchEngineIndexer } from '@backstage/plugin-search-backend-node';\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport { Knex } from 'knex';\nimport { DatabaseStore } from '../database';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n/** @public */\nexport type PgSearchEngineIndexerOptions = {\n batchSize: number;\n type: string;\n databaseStore: DatabaseStore;\n logger?: LoggerService;\n};\n\n/** @public */\nexport class PgSearchEngineIndexer extends BatchSearchEngineIndexer {\n private logger?: LoggerService;\n private store: DatabaseStore;\n private type: string;\n private tx: Knex.Transaction | undefined;\n private numRecords = 0;\n\n constructor(options: PgSearchEngineIndexerOptions) {\n super({ batchSize: options.batchSize });\n this.store = options.databaseStore;\n this.type = options.type;\n this.logger = options.logger;\n }\n\n async initialize(): Promise<void> {\n this.tx = await this.store.getTransaction();\n try {\n await this.store.prepareInsert(this.tx);\n } catch (e) {\n // In case of error, rollback the transaction and re-throw the error so\n // that the stream can be closed and destroyed properly.\n this.tx.rollback(e);\n throw e;\n }\n }\n\n 
async index(documents: IndexableDocument[]): Promise<void> {\n this.numRecords += documents.length;\n\n const refs = [...new Set(documents.map(d => d.authorization?.resourceRef))];\n this.logger?.debug(\n `Attempting to index the following entities: ${refs.toString()}`,\n );\n\n try {\n await this.store.insertDocuments(this.tx!, this.type, documents);\n } catch (e) {\n // In case of error, rollback the transaction and re-throw the error so\n // that the stream can be closed and destroyed properly.\n this.tx!.rollback(e);\n throw e;\n }\n }\n\n async finalize(): Promise<void> {\n // If no documents were indexed, rollback the transaction, log a warning,\n // and do not continue. This ensures that collators that return empty sets\n // of documents do not cause the index to be deleted.\n if (this.numRecords === 0) {\n this.logger?.warn(\n `Index for ${this.type} was not replaced: indexer received 0 documents`,\n );\n this.tx!.rollback!();\n return;\n }\n\n // Attempt to complete and commit the transaction.\n try {\n await this.store.completeInsert(this.tx!, this.type);\n this.tx!.commit();\n } catch (e) {\n // Otherwise, rollback the transaction and re-throw the error so that the\n // stream can be closed and destroyed properly.\n this.tx!.rollback!(e);\n throw e;\n }\n }\n\n /**\n * Custom handler covering the case where an error occurred somewhere else in\n * the indexing pipeline (e.g. a collator or decorator). In such cases, the\n * finalize method is not called, which leaves a dangling transaction and\n * therefore an open connection to PG. This handler ensures we close the\n * transaction and associated connection.\n *\n * todo(@backstage/search-maintainers): Consider introducing a more\n * formal mechanism for handling such errors in BatchSearchEngineIndexer and\n * replacing this method with it. 
See: #17291\n *\n * @internal\n */\n async _destroy(error: Error | null, done: (error?: Error | null) => void) {\n // Ignore situations where there was no error.\n if (!error) {\n done();\n return;\n }\n\n if (!this.tx!.isCompleted()) {\n await this.tx!.rollback(error);\n }\n\n done(error);\n }\n}\n"],"names":["BatchSearchEngineIndexer"],"mappings":";;;;AA+BO,MAAM,8BAA8BA,gDAAyB,CAAA;AAAA,EAC1D,MAAA,CAAA;AAAA,EACA,KAAA,CAAA;AAAA,EACA,IAAA,CAAA;AAAA,EACA,EAAA,CAAA;AAAA,EACA,UAAa,GAAA,CAAA,CAAA;AAAA,EAErB,YAAY,OAAuC,EAAA;AACjD,IAAA,KAAA,CAAM,EAAE,SAAA,EAAW,OAAQ,CAAA,SAAA,EAAW,CAAA,CAAA;AACtC,IAAA,IAAA,CAAK,QAAQ,OAAQ,CAAA,aAAA,CAAA;AACrB,IAAA,IAAA,CAAK,OAAO,OAAQ,CAAA,IAAA,CAAA;AACpB,IAAA,IAAA,CAAK,SAAS,OAAQ,CAAA,MAAA,CAAA;AAAA,GACxB;AAAA,EAEA,MAAM,UAA4B,GAAA;AAChC,IAAA,IAAA,CAAK,EAAK,GAAA,MAAM,IAAK,CAAA,KAAA,CAAM,cAAe,EAAA,CAAA;AAC1C,IAAI,IAAA;AACF,MAAA,MAAM,IAAK,CAAA,KAAA,CAAM,aAAc,CAAA,IAAA,CAAK,EAAE,CAAA,CAAA;AAAA,aAC/B,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAG,SAAS,CAAC,CAAA,CAAA;AAClB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA,EAEA,MAAM,MAAM,SAA+C,EAAA;AACzD,IAAA,IAAA,CAAK,cAAc,SAAU,CAAA,MAAA,CAAA;AAE7B,IAAA,MAAM,IAAO,GAAA,CAAC,GAAG,IAAI,GAAI,CAAA,SAAA,CAAU,GAAI,CAAA,CAAA,CAAA,KAAK,CAAE,CAAA,aAAA,EAAe,WAAW,CAAC,CAAC,CAAA,CAAA;AAC1E,IAAA,IAAA,CAAK,MAAQ,EAAA,KAAA;AAAA,MACX,CAAA,4CAAA,EAA+C,IAAK,CAAA,QAAA,EAAU,CAAA,CAAA;AAAA,KAChE,CAAA;AAEA,IAAI,IAAA;AACF,MAAA,MAAM,KAAK,KAAM,CAAA,eAAA,CAAgB,KAAK,EAAK,EAAA,IAAA,CAAK,MAAM,SAAS,CAAA,CAAA;AAAA,aACxD,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAI,SAAS,CAAC,CAAA,CAAA;AACnB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA,EAEA,MAAM,QAA0B,GAAA;AAI9B,IAAI,IAAA,IAAA,CAAK,eAAe,CAAG,EAAA;AACzB,MAAA,IAAA,CAAK,MAAQ,EAAA,IAAA;AAAA,QACX,CAAA,UAAA,EAAa,KAAK,IAAI,CAAA,+CAAA,CAAA;AAAA,OACxB,CAAA;AACA,MAAA,IAAA,CAAK,GAAI,QAAU,EAAA,CAAA;AACnB,MAAA,OAAA;AAAA,KACF;AAGA,IAAI,IAAA;AACF,MAAA,MAAM,KAAK,KAAM,CAAA,cAAA,CAAe,IAAK,CAAA,EAAA,EAAK,KAAK,IAAI,CAAA,CAAA;AACnD,MAAA,IAAA,CAAK,GAAI,MAAO,EAAA,CAAA;AAAA,aACT,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAI,SAA
U,CAAC,CAAA,CAAA;AACpB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,QAAS,CAAA,KAAA,EAAqB,IAAsC,EAAA;AAExE,IAAA,IAAI,CAAC,KAAO,EAAA;AACV,MAAK,IAAA,EAAA,CAAA;AACL,MAAA,OAAA;AAAA,KACF;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,EAAI,CAAA,WAAA,EAAe,EAAA;AAC3B,MAAM,MAAA,IAAA,CAAK,EAAI,CAAA,QAAA,CAAS,KAAK,CAAA,CAAA;AAAA,KAC/B;AAEA,IAAA,IAAA,CAAK,KAAK,CAAA,CAAA;AAAA,GACZ;AACF;;;;"}
package/dist/alpha.cjs.js CHANGED
@@ -4,9 +4,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
4
4
 
5
5
  var backendPluginApi = require('@backstage/backend-plugin-api');
6
6
  var alpha$1 = require('@backstage/plugin-search-backend-node/alpha');
7
- var PgSearchEngine = require('./cjs/PgSearchEngine-CyA2vr4y.cjs.js');
8
- require('@backstage/plugin-search-backend-node');
9
- require('uuid');
7
+ var PgSearchEngine = require('./PgSearchEngine/PgSearchEngine.cjs.js');
10
8
 
11
9
  var alpha = backendPluginApi.createBackendModule({
12
10
  pluginId: "search",
@@ -1 +1 @@
1
- {"version":3,"file":"alpha.cjs.js","sources":["../src/alpha.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { searchEngineRegistryExtensionPoint } from '@backstage/plugin-search-backend-node/alpha';\nimport { PgSearchEngine } from './PgSearchEngine';\n\n/**\n * @alpha\n * Search backend module for the Postgres engine.\n */\nexport default createBackendModule({\n pluginId: 'search',\n moduleId: 'postgres-engine',\n register(env) {\n env.registerInit({\n deps: {\n searchEngineRegistry: searchEngineRegistryExtensionPoint,\n database: coreServices.database,\n config: coreServices.rootConfig,\n logger: coreServices.logger,\n },\n async init({ searchEngineRegistry, database, config, logger }) {\n if (await PgSearchEngine.supported(database)) {\n searchEngineRegistry.setSearchEngine(\n await PgSearchEngine.fromConfig(config, {\n database,\n logger,\n }),\n );\n } else {\n logger.warn(\n 'Postgres search engine is not supported, skipping registration of search-backend-module-pg',\n );\n }\n },\n });\n 
},\n});\n"],"names":["createBackendModule","searchEngineRegistryExtensionPoint","coreServices","PgSearchEngine"],"mappings":";;;;;;;;;;AA0BA,YAAeA,oCAAoB,CAAA;AAAA,EACjC,QAAU,EAAA,QAAA;AAAA,EACV,QAAU,EAAA,iBAAA;AAAA,EACV,SAAS,GAAK,EAAA;AACZ,IAAA,GAAA,CAAI,YAAa,CAAA;AAAA,MACf,IAAM,EAAA;AAAA,QACJ,oBAAsB,EAAAC,0CAAA;AAAA,QACtB,UAAUC,6BAAa,CAAA,QAAA;AAAA,QACvB,QAAQA,6BAAa,CAAA,UAAA;AAAA,QACrB,QAAQA,6BAAa,CAAA,MAAA;AAAA,OACvB;AAAA,MACA,MAAM,IAAK,CAAA,EAAE,sBAAsB,QAAU,EAAA,MAAA,EAAQ,QAAU,EAAA;AAC7D,QAAA,IAAI,MAAMC,6BAAA,CAAe,SAAU,CAAA,QAAQ,CAAG,EAAA;AAC5C,UAAqB,oBAAA,CAAA,eAAA;AAAA,YACnB,MAAMA,6BAAe,CAAA,UAAA,CAAW,MAAQ,EAAA;AAAA,cACtC,QAAA;AAAA,cACA,MAAA;AAAA,aACD,CAAA;AAAA,WACH,CAAA;AAAA,SACK,MAAA;AACL,UAAO,MAAA,CAAA,IAAA;AAAA,YACL,4FAAA;AAAA,WACF,CAAA;AAAA,SACF;AAAA,OACF;AAAA,KACD,CAAA,CAAA;AAAA,GACH;AACF,CAAC,CAAA;;;;"}
1
+ {"version":3,"file":"alpha.cjs.js","sources":["../src/alpha.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { searchEngineRegistryExtensionPoint } from '@backstage/plugin-search-backend-node/alpha';\nimport { PgSearchEngine } from './PgSearchEngine';\n\n/**\n * @alpha\n * Search backend module for the Postgres engine.\n */\nexport default createBackendModule({\n pluginId: 'search',\n moduleId: 'postgres-engine',\n register(env) {\n env.registerInit({\n deps: {\n searchEngineRegistry: searchEngineRegistryExtensionPoint,\n database: coreServices.database,\n config: coreServices.rootConfig,\n logger: coreServices.logger,\n },\n async init({ searchEngineRegistry, database, config, logger }) {\n if (await PgSearchEngine.supported(database)) {\n searchEngineRegistry.setSearchEngine(\n await PgSearchEngine.fromConfig(config, {\n database,\n logger,\n }),\n );\n } else {\n logger.warn(\n 'Postgres search engine is not supported, skipping registration of search-backend-module-pg',\n );\n }\n },\n });\n 
},\n});\n"],"names":["createBackendModule","searchEngineRegistryExtensionPoint","coreServices","PgSearchEngine"],"mappings":";;;;;;;;AA0BA,YAAeA,oCAAoB,CAAA;AAAA,EACjC,QAAU,EAAA,QAAA;AAAA,EACV,QAAU,EAAA,iBAAA;AAAA,EACV,SAAS,GAAK,EAAA;AACZ,IAAA,GAAA,CAAI,YAAa,CAAA;AAAA,MACf,IAAM,EAAA;AAAA,QACJ,oBAAsB,EAAAC,0CAAA;AAAA,QACtB,UAAUC,6BAAa,CAAA,QAAA;AAAA,QACvB,QAAQA,6BAAa,CAAA,UAAA;AAAA,QACrB,QAAQA,6BAAa,CAAA,MAAA;AAAA,OACvB;AAAA,MACA,MAAM,IAAK,CAAA,EAAE,sBAAsB,QAAU,EAAA,MAAA,EAAQ,QAAU,EAAA;AAC7D,QAAA,IAAI,MAAMC,6BAAA,CAAe,SAAU,CAAA,QAAQ,CAAG,EAAA;AAC5C,UAAqB,oBAAA,CAAA,eAAA;AAAA,YACnB,MAAMA,6BAAe,CAAA,UAAA,CAAW,MAAQ,EAAA;AAAA,cACtC,QAAA;AAAA,cACA,MAAA;AAAA,aACD,CAAA;AAAA,WACH,CAAA;AAAA,SACK,MAAA;AACL,UAAO,MAAA,CAAA,IAAA;AAAA,YACL,4FAAA;AAAA,WACF,CAAA;AAAA,SACF;AAAA,OACF;AAAA,KACD,CAAA,CAAA;AAAA,GACH;AACF,CAAC,CAAA;;;;"}
@@ -0,0 +1,117 @@
1
+ 'use strict';
2
+
3
+ var backendPluginApi = require('@backstage/backend-plugin-api');
4
+ var util = require('./util.cjs.js');
5
+
6
+ const migrationsDir = backendPluginApi.resolvePackagePath(
7
+ "@backstage/plugin-search-backend-module-pg",
8
+ "migrations"
9
+ );
10
+ class DatabaseDocumentStore {
11
+ constructor(db) {
12
+ this.db = db;
13
+ }
14
+ static async create(database) {
15
+ const knex = await database.getClient();
16
+ try {
17
+ const majorVersion = await util.queryPostgresMajorVersion(knex);
18
+ if (majorVersion < 12) {
19
+ throw new Error(
20
+ `The PgSearchEngine requires at least postgres version 12 (but is running on ${majorVersion})`
21
+ );
22
+ }
23
+ } catch {
24
+ throw new Error(
25
+ "The PgSearchEngine is only supported when using a postgres database (>=12.x)"
26
+ );
27
+ }
28
+ if (!database.migrations?.skip) {
29
+ await knex.migrate.latest({
30
+ directory: migrationsDir
31
+ });
32
+ }
33
+ return new DatabaseDocumentStore(knex);
34
+ }
35
+ static async supported(knex) {
36
+ try {
37
+ const majorVersion = await util.queryPostgresMajorVersion(knex);
38
+ return majorVersion >= 12;
39
+ } catch {
40
+ return false;
41
+ }
42
+ }
43
+ async transaction(fn) {
44
+ return await this.db.transaction(fn);
45
+ }
46
+ async getTransaction() {
47
+ return this.db.transaction();
48
+ }
49
+ async prepareInsert(tx) {
50
+ await tx.raw(
51
+ "CREATE TEMP TABLE documents_to_insert (type text NOT NULL, document jsonb NOT NULL, hash bytea NOT NULL GENERATED ALWAYS AS (sha256(replace(document::text || type, '\\', '\\\\')::bytea)) STORED) ON COMMIT DROP"
52
+ );
53
+ }
54
+ async completeInsert(tx, type) {
55
+ await tx.insert(
56
+ tx("documents_to_insert").select(
57
+ "type",
58
+ "document",
59
+ "hash"
60
+ )
61
+ ).into(tx.raw("documents (type, document, hash)")).onConflict("hash").ignore();
62
+ const rowsToDelete = tx("documents").select("documents.hash").leftJoin("documents_to_insert", {
63
+ "documents.hash": "documents_to_insert.hash"
64
+ }).whereNull("documents_to_insert.hash");
65
+ await tx("documents").where({ type }).whereIn("hash", rowsToDelete).delete();
66
+ }
67
+ async insertDocuments(tx, type, documents) {
68
+ await tx("documents_to_insert").insert(
69
+ documents.map((document) => ({
70
+ type,
71
+ document
72
+ }))
73
+ );
74
+ }
75
+ async query(tx, searchQuery) {
76
+ const { types, pgTerm, fields, offset, limit, options } = searchQuery;
77
+ const query = tx("documents");
78
+ if (pgTerm) {
79
+ query.from(tx.raw("documents, to_tsquery('english', ?) query", pgTerm)).whereRaw("query @@ body");
80
+ } else {
81
+ query.from("documents");
82
+ }
83
+ if (types) {
84
+ query.whereIn("type", types);
85
+ }
86
+ if (fields) {
87
+ Object.keys(fields).forEach((key) => {
88
+ const value = fields[key];
89
+ const valueArray = Array.isArray(value) ? value : [value];
90
+ const fieldValueCompare = valueArray.map((v) => ({ [key]: v })).map((v) => JSON.stringify(v));
91
+ const arrayValueCompare = valueArray.map((v) => ({ [key]: [v] })).map((v) => JSON.stringify(v));
92
+ const valueCompare = [...fieldValueCompare, ...arrayValueCompare];
93
+ query.whereRaw(
94
+ `(${valueCompare.map(() => "document @> ?").join(" OR ")})`,
95
+ valueCompare
96
+ );
97
+ });
98
+ }
99
+ query.select("type", "document");
100
+ if (pgTerm && options.useHighlight) {
101
+ const headlineOptions = `MaxWords=${options.maxWords}, MinWords=${options.minWords}, ShortWord=${options.shortWord}, HighlightAll=${options.highlightAll}, MaxFragments=${options.maxFragments}, FragmentDelimiter=${options.fragmentDelimiter}, StartSel=${options.preTag}, StopSel=${options.postTag}`;
102
+ query.select(tx.raw('ts_rank_cd(body, query) AS "rank"')).select(
103
+ tx.raw(
104
+ `ts_headline('english', document, query, '${headlineOptions}') as "highlight"`
105
+ )
106
+ ).orderBy("rank", "desc");
107
+ } else if (pgTerm && !options.useHighlight) {
108
+ query.select(tx.raw('ts_rank_cd(body, query) AS "rank"')).orderBy("rank", "desc");
109
+ } else {
110
+ query.select(tx.raw("1 as rank"));
111
+ }
112
+ return await query.offset(offset).limit(limit);
113
+ }
114
+ }
115
+
116
+ exports.DatabaseDocumentStore = DatabaseDocumentStore;
117
+ //# sourceMappingURL=DatabaseDocumentStore.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"DatabaseDocumentStore.cjs.js","sources":["../../src/database/DatabaseDocumentStore.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n DatabaseService,\n resolvePackagePath,\n} from '@backstage/backend-plugin-api';\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport { Knex } from 'knex';\nimport {\n DatabaseStore,\n DocumentResultRow,\n PgSearchQuery,\n RawDocumentRow,\n} from './types';\nimport { queryPostgresMajorVersion } from './util';\n\nconst migrationsDir = resolvePackagePath(\n '@backstage/plugin-search-backend-module-pg',\n 'migrations',\n);\n\n/** @public */\nexport class DatabaseDocumentStore implements DatabaseStore {\n static async create(\n database: DatabaseService,\n ): Promise<DatabaseDocumentStore> {\n const knex = await database.getClient();\n try {\n const majorVersion = await queryPostgresMajorVersion(knex);\n\n if (majorVersion < 12) {\n // We are using some features (like generated columns) that aren't\n // available in older postgres versions.\n throw new Error(\n `The PgSearchEngine requires at least postgres version 12 (but is running on ${majorVersion})`,\n );\n }\n } catch {\n // Actually both mysql and sqlite have a full text search, too. 
We could\n // implement them separately or add them here.\n throw new Error(\n 'The PgSearchEngine is only supported when using a postgres database (>=12.x)',\n );\n }\n\n if (!database.migrations?.skip) {\n await knex.migrate.latest({\n directory: migrationsDir,\n });\n }\n\n return new DatabaseDocumentStore(knex);\n }\n\n static async supported(knex: Knex): Promise<boolean> {\n try {\n const majorVersion = await queryPostgresMajorVersion(knex);\n\n return majorVersion >= 12;\n } catch {\n return false;\n }\n }\n\n constructor(private readonly db: Knex) {}\n\n async transaction<T>(fn: (tx: Knex.Transaction) => Promise<T>): Promise<T> {\n return await this.db.transaction(fn);\n }\n\n async getTransaction(): Promise<Knex.Transaction> {\n return this.db.transaction();\n }\n\n async prepareInsert(tx: Knex.Transaction): Promise<void> {\n // We create a temporary table to collect the hashes of the documents that\n // we expect to be in the documents table at the end. The table is deleted\n // at the end of the transaction.\n // The hash makes sure that we generate a new row for every change.\n await tx.raw(\n 'CREATE TEMP TABLE documents_to_insert (' +\n 'type text NOT NULL, ' +\n 'document jsonb NOT NULL, ' +\n // Generating the hash requires a trick, as the text to bytea\n // conversation runs into errors in case the text contains a backslash.\n // Therefore we have to escape them.\n \"hash bytea NOT NULL GENERATED ALWAYS AS (sha256(replace(document::text || type, '\\\\', '\\\\\\\\')::bytea)) STORED\" +\n ') ON COMMIT DROP',\n );\n }\n\n async completeInsert(tx: Knex.Transaction, type: string): Promise<void> {\n // Copy all new rows into the documents table\n await tx\n .insert(\n tx<RawDocumentRow>('documents_to_insert').select(\n 'type',\n 'document',\n 'hash',\n ),\n )\n .into(tx.raw('documents (type, document, hash)'))\n .onConflict('hash')\n .ignore();\n\n // Delete all documents that we don't expect (deleted and changed)\n const rowsToDelete = 
tx<RawDocumentRow>('documents')\n .select('documents.hash')\n .leftJoin<RawDocumentRow>('documents_to_insert', {\n 'documents.hash': 'documents_to_insert.hash',\n })\n .whereNull('documents_to_insert.hash');\n\n await tx<RawDocumentRow>('documents')\n .where({ type })\n .whereIn('hash', rowsToDelete)\n .delete();\n }\n\n async insertDocuments(\n tx: Knex.Transaction,\n type: string,\n documents: IndexableDocument[],\n ): Promise<void> {\n // Insert all documents into the temporary table to process them later\n await tx<DocumentResultRow>('documents_to_insert').insert(\n documents.map(document => ({\n type,\n document,\n })),\n );\n }\n\n async query(\n tx: Knex.Transaction,\n searchQuery: PgSearchQuery,\n ): Promise<DocumentResultRow[]> {\n const { types, pgTerm, fields, offset, limit, options } = searchQuery;\n // TODO(awanlin): We should make the language a parameter so that we can support more then just english\n // Builds a query like:\n // SELECT ts_rank_cd(body, query) AS rank, type, document,\n // ts_headline('english', document, query) AS highlight\n // FROM documents, to_tsquery('english', 'consent') query\n // WHERE query @@ body AND (document @> '{\"kind\": \"API\"}')\n // ORDER BY rank DESC\n // LIMIT 10;\n const query = tx<DocumentResultRow>('documents');\n\n if (pgTerm) {\n query\n .from(tx.raw(\"documents, to_tsquery('english', ?) query\", pgTerm))\n .whereRaw('query @@ body');\n } else {\n query.from('documents');\n }\n\n if (types) {\n query.whereIn('type', types);\n }\n\n if (fields) {\n Object.keys(fields).forEach(key => {\n const value = fields[key];\n const valueArray = Array.isArray(value) ? 
value : [value];\n const fieldValueCompare = valueArray\n .map(v => ({ [key]: v }))\n .map(v => JSON.stringify(v));\n const arrayValueCompare = valueArray\n .map(v => ({ [key]: [v] }))\n .map(v => JSON.stringify(v));\n const valueCompare = [...fieldValueCompare, ...arrayValueCompare];\n query.whereRaw(\n `(${valueCompare.map(() => 'document @> ?').join(' OR ')})`,\n valueCompare,\n );\n });\n }\n\n query.select('type', 'document');\n\n if (pgTerm && options.useHighlight) {\n const headlineOptions = `MaxWords=${options.maxWords}, MinWords=${options.minWords}, ShortWord=${options.shortWord}, HighlightAll=${options.highlightAll}, MaxFragments=${options.maxFragments}, FragmentDelimiter=${options.fragmentDelimiter}, StartSel=${options.preTag}, StopSel=${options.postTag}`;\n query\n .select(tx.raw('ts_rank_cd(body, query) AS \"rank\"'))\n .select(\n tx.raw(\n `ts_headline(\\'english\\', document, query, '${headlineOptions}') as \"highlight\"`,\n ),\n )\n .orderBy('rank', 'desc');\n } else if (pgTerm && !options.useHighlight) {\n query\n .select(tx.raw('ts_rank_cd(body, query) AS \"rank\"'))\n .orderBy('rank', 'desc');\n } else {\n query.select(tx.raw('1 as rank'));\n }\n\n return await query.offset(offset).limit(limit);\n 
}\n}\n"],"names":["resolvePackagePath","queryPostgresMajorVersion"],"mappings":";;;;;AA6BA,MAAM,aAAgB,GAAAA,mCAAA;AAAA,EACpB,4CAAA;AAAA,EACA,YAAA;AACF,CAAA,CAAA;AAGO,MAAM,qBAA+C,CAAA;AAAA,EA0C1D,YAA6B,EAAU,EAAA;AAAV,IAAA,IAAA,CAAA,EAAA,GAAA,EAAA,CAAA;AAAA,GAAW;AAAA,EAzCxC,aAAa,OACX,QACgC,EAAA;AAChC,IAAM,MAAA,IAAA,GAAO,MAAM,QAAA,CAAS,SAAU,EAAA,CAAA;AACtC,IAAI,IAAA;AACF,MAAM,MAAA,YAAA,GAAe,MAAMC,8BAAA,CAA0B,IAAI,CAAA,CAAA;AAEzD,MAAA,IAAI,eAAe,EAAI,EAAA;AAGrB,QAAA,MAAM,IAAI,KAAA;AAAA,UACR,+EAA+E,YAAY,CAAA,CAAA,CAAA;AAAA,SAC7F,CAAA;AAAA,OACF;AAAA,KACM,CAAA,MAAA;AAGN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,8EAAA;AAAA,OACF,CAAA;AAAA,KACF;AAEA,IAAI,IAAA,CAAC,QAAS,CAAA,UAAA,EAAY,IAAM,EAAA;AAC9B,MAAM,MAAA,IAAA,CAAK,QAAQ,MAAO,CAAA;AAAA,QACxB,SAAW,EAAA,aAAA;AAAA,OACZ,CAAA,CAAA;AAAA,KACH;AAEA,IAAO,OAAA,IAAI,sBAAsB,IAAI,CAAA,CAAA;AAAA,GACvC;AAAA,EAEA,aAAa,UAAU,IAA8B,EAAA;AACnD,IAAI,IAAA;AACF,MAAM,MAAA,YAAA,GAAe,MAAMA,8BAAA,CAA0B,IAAI,CAAA,CAAA;AAEzD,MAAA,OAAO,YAAgB,IAAA,EAAA,CAAA;AAAA,KACjB,CAAA,MAAA;AACN,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAAA,GACF;AAAA,EAIA,MAAM,YAAe,EAAsD,EAAA;AACzE,IAAA,OAAO,MAAM,IAAA,CAAK,EAAG,CAAA,WAAA,CAAY,EAAE,CAAA,CAAA;AAAA,GACrC;AAAA,EAEA,MAAM,cAA4C,GAAA;AAChD,IAAO,OAAA,IAAA,CAAK,GAAG,WAAY,EAAA,CAAA;AAAA,GAC7B;AAAA,EAEA,MAAM,cAAc,EAAqC,EAAA;AAKvD,IAAA,MAAM,EAAG,CAAA,GAAA;AAAA,MACP,mNAAA;AAAA,KAQF,CAAA;AAAA,GACF;AAAA,EAEA,MAAM,cAAe,CAAA,EAAA,EAAsB,IAA6B,EAAA;AAEtE,IAAA,MAAM,EACH,CAAA,MAAA;AAAA,MACC,EAAA,CAAmB,qBAAqB,CAAE,CAAA,MAAA;AAAA,QACxC,MAAA;AAAA,QACA,UAAA;AAAA,QACA,MAAA;AAAA,OACF;AAAA,KACF,CACC,IAAK,CAAA,EAAA,CAAG,GAAI,CAAA,kCAAkC,CAAC,CAC/C,CAAA,UAAA,CAAW,MAAM,CAAA,CACjB,MAAO,EAAA,CAAA;AAGV,IAAM,MAAA,YAAA,GAAe,GAAmB,WAAW,CAAA,CAChD,OAAO,gBAAgB,CAAA,CACvB,SAAyB,qBAAuB,EAAA;AAAA,MAC/C,gBAAkB,EAAA,0BAAA;AAAA,KACnB,CACA,CAAA,SAAA,CAAU,0BAA0B,CAAA,CAAA;AAEvC,IAAA,MAAM,EAAmB,CAAA,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,IAAA,EAAM,CAAA,CACd,OAAQ,CAAA,MAAA,EAAQ,YAAY,CAAA,CAC5B,MAAO,EAAA,CAAA;AAAA,GACZ;AAAA,EAEA,MAAM,eAAA,CACJ,EACA,EAAA,IAAA,EACA,SACe,EAAA;AAEf,IAAM,MAAA,EAAA,CAA
sB,qBAAqB,CAAE,CAAA,MAAA;AAAA,MACjD,SAAA,CAAU,IAAI,CAAa,QAAA,MAAA;AAAA,QACzB,IAAA;AAAA,QACA,QAAA;AAAA,OACA,CAAA,CAAA;AAAA,KACJ,CAAA;AAAA,GACF;AAAA,EAEA,MAAM,KACJ,CAAA,EAAA,EACA,WAC8B,EAAA;AAC9B,IAAA,MAAM,EAAE,KAAO,EAAA,MAAA,EAAQ,QAAQ,MAAQ,EAAA,KAAA,EAAO,SAAY,GAAA,WAAA,CAAA;AAS1D,IAAM,MAAA,KAAA,GAAQ,GAAsB,WAAW,CAAA,CAAA;AAE/C,IAAA,IAAI,MAAQ,EAAA;AACV,MACG,KAAA,CAAA,IAAA,CAAK,GAAG,GAAI,CAAA,2CAAA,EAA6C,MAAM,CAAC,CAAA,CAChE,SAAS,eAAe,CAAA,CAAA;AAAA,KACtB,MAAA;AACL,MAAA,KAAA,CAAM,KAAK,WAAW,CAAA,CAAA;AAAA,KACxB;AAEA,IAAA,IAAI,KAAO,EAAA;AACT,MAAM,KAAA,CAAA,OAAA,CAAQ,QAAQ,KAAK,CAAA,CAAA;AAAA,KAC7B;AAEA,IAAA,IAAI,MAAQ,EAAA;AACV,MAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAE,CAAA,OAAA,CAAQ,CAAO,GAAA,KAAA;AACjC,QAAM,MAAA,KAAA,GAAQ,OAAO,GAAG,CAAA,CAAA;AACxB,QAAA,MAAM,aAAa,KAAM,CAAA,OAAA,CAAQ,KAAK,CAAI,GAAA,KAAA,GAAQ,CAAC,KAAK,CAAA,CAAA;AACxD,QAAA,MAAM,oBAAoB,UACvB,CAAA,GAAA,CAAI,CAAM,CAAA,MAAA,EAAE,CAAC,GAAG,GAAG,CAAE,EAAA,CAAE,EACvB,GAAI,CAAA,CAAA,CAAA,KAAK,IAAK,CAAA,SAAA,CAAU,CAAC,CAAC,CAAA,CAAA;AAC7B,QAAA,MAAM,oBAAoB,UACvB,CAAA,GAAA,CAAI,QAAM,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,GAAI,CACzB,CAAA,GAAA,CAAI,OAAK,IAAK,CAAA,SAAA,CAAU,CAAC,CAAC,CAAA,CAAA;AAC7B,QAAA,MAAM,YAAe,GAAA,CAAC,GAAG,iBAAA,EAAmB,GAAG,iBAAiB,CAAA,CAAA;AAChE,QAAM,KAAA,CAAA,QAAA;AAAA,UACJ,CAAA,CAAA,EAAI,aAAa,GAAI,CAAA,MAAM,eAAe,CAAE,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,CAAA,CAAA;AAAA,UACxD,YAAA;AAAA,SACF,CAAA;AAAA,OACD,CAAA,CAAA;AAAA,KACH;AAEA,IAAM,KAAA,CAAA,MAAA,CAAO,QAAQ,UAAU,CAAA,CAAA;AAE/B,IAAI,IAAA,MAAA,IAAU,QAAQ,YAAc,EAAA;AAClC,MAAM,MAAA,eAAA,GAAkB,CAAY,SAAA,EAAA,OAAA,CAAQ,QAAQ,CAAA,WAAA,EAAc,QAAQ,QAAQ,CAAA,YAAA,EAAe,OAAQ,CAAA,SAAS,CAAkB,eAAA,EAAA,OAAA,CAAQ,YAAY,CAAkB,eAAA,EAAA,OAAA,CAAQ,YAAY,CAAA,oBAAA,EAAuB,OAAQ,CAAA,iBAAiB,cAAc,OAAQ,CAAA,MAAM,CAAa,UAAA,EAAA,OAAA,CAAQ,OAAO,CAAA,CAAA,CAAA;AACtS,MAAA,KAAA,CACG,MAAO,CAAA,EAAA,CAAG,GAAI,CAAA,mCAAmC,CAAC,CAClD,CAAA,MAAA;AAAA,QACC,EAAG,CAAA,GAAA;AAAA,UACD,4CAA8C,eAAe,CAAA,iBAAA,CAAA;AAAA,SAC/D;AAAA,OACF,CACC,OAAQ,CAAA,MAAA,EAAQ,MAAM,CAAA,CAAA;AAAA,KAChB,MAAA,IAAA,MAAA,IAAU,CAAC,
OAAA,CAAQ,YAAc,EAAA;AAC1C,MACG,KAAA,CAAA,MAAA,CAAO,GAAG,GAAI,CAAA,mCAAmC,CAAC,CAClD,CAAA,OAAA,CAAQ,QAAQ,MAAM,CAAA,CAAA;AAAA,KACpB,MAAA;AACL,MAAA,KAAA,CAAM,MAAO,CAAA,EAAA,CAAG,GAAI,CAAA,WAAW,CAAC,CAAA,CAAA;AAAA,KAClC;AAEA,IAAA,OAAO,MAAM,KAAM,CAAA,MAAA,CAAO,MAAM,CAAA,CAAE,MAAM,KAAK,CAAA,CAAA;AAAA,GAC/C;AACF;;;;"}
@@ -0,0 +1,15 @@
1
+ 'use strict';
2
+
3
+ async function queryPostgresMajorVersion(knex) {
4
+ if (knex.client.config.client !== "pg") {
5
+ throw new Error("Can't resolve version, not a postgres database");
6
+ }
7
+ const { rows } = await knex.raw("SHOW server_version_num");
8
+ const [result] = rows;
9
+ const version = +result.server_version_num;
10
+ const majorVersion = Math.floor(version / 1e4);
11
+ return majorVersion;
12
+ }
13
+
14
+ exports.queryPostgresMajorVersion = queryPostgresMajorVersion;
15
+ //# sourceMappingURL=util.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"util.cjs.js","sources":["../../src/database/util.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Knex } from 'knex';\n\nexport async function queryPostgresMajorVersion(knex: Knex): Promise<number> {\n if (knex.client.config.client !== 'pg') {\n throw new Error(\"Can't resolve version, not a postgres database\");\n }\n\n const { rows } = await knex.raw('SHOW server_version_num');\n const [result] = rows;\n const version = +result.server_version_num;\n const majorVersion = Math.floor(version / 10000);\n return majorVersion;\n}\n"],"names":[],"mappings":";;AAiBA,eAAsB,0BAA0B,IAA6B,EAAA;AAC3E,EAAA,IAAI,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA,KAAW,IAAM,EAAA;AACtC,IAAM,MAAA,IAAI,MAAM,gDAAgD,CAAA,CAAA;AAAA,GAClE;AAEA,EAAA,MAAM,EAAE,IAAK,EAAA,GAAI,MAAM,IAAA,CAAK,IAAI,yBAAyB,CAAA,CAAA;AACzD,EAAM,MAAA,CAAC,MAAM,CAAI,GAAA,IAAA,CAAA;AACjB,EAAM,MAAA,OAAA,GAAU,CAAC,MAAO,CAAA,kBAAA,CAAA;AACxB,EAAA,MAAM,YAAe,GAAA,IAAA,CAAK,KAAM,CAAA,OAAA,GAAU,GAAK,CAAA,CAAA;AAC/C,EAAO,OAAA,YAAA,CAAA;AACT;;;;"}
package/dist/index.cjs.js CHANGED
@@ -1,12 +1,10 @@
1
1
  'use strict';
2
2
 
3
- var PgSearchEngine = require('./cjs/PgSearchEngine-CyA2vr4y.cjs.js');
4
- require('@backstage/plugin-search-backend-node');
5
- require('@backstage/backend-plugin-api');
6
- require('uuid');
3
+ var DatabaseDocumentStore = require('./database/DatabaseDocumentStore.cjs.js');
4
+ var PgSearchEngine = require('./PgSearchEngine/PgSearchEngine.cjs.js');
7
5
 
8
6
 
9
7
 
10
- exports.DatabaseDocumentStore = PgSearchEngine.DatabaseDocumentStore;
8
+ exports.DatabaseDocumentStore = DatabaseDocumentStore.DatabaseDocumentStore;
11
9
  exports.PgSearchEngine = PgSearchEngine.PgSearchEngine;
12
10
  //# sourceMappingURL=index.cjs.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;"}
1
+ {"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/plugin-search-backend-module-pg",
3
- "version": "0.5.36-next.0",
3
+ "version": "0.5.36-next.2",
4
4
  "description": "A module for the search backend that implements search using PostgreSQL",
5
5
  "backstage": {
6
6
  "role": "backend-plugin-module",
@@ -50,17 +50,17 @@
50
50
  },
51
51
  "dependencies": {
52
52
  "@backstage/backend-common": "^0.25.0",
53
- "@backstage/backend-plugin-api": "^1.0.1-next.0",
54
- "@backstage/config": "^1.2.0",
55
- "@backstage/plugin-search-backend-node": "^1.3.3-next.0",
56
- "@backstage/plugin-search-common": "^1.2.14",
53
+ "@backstage/backend-plugin-api": "1.0.1-next.1",
54
+ "@backstage/config": "1.2.0",
55
+ "@backstage/plugin-search-backend-node": "1.3.3-next.2",
56
+ "@backstage/plugin-search-common": "1.2.14",
57
57
  "knex": "^3.0.0",
58
58
  "lodash": "^4.17.21",
59
59
  "uuid": "^9.0.0"
60
60
  },
61
61
  "devDependencies": {
62
- "@backstage/backend-test-utils": "^1.0.1-next.0",
63
- "@backstage/cli": "^0.28.0-next.0"
62
+ "@backstage/backend-test-utils": "1.0.1-next.2",
63
+ "@backstage/cli": "0.28.0-next.2"
64
64
  },
65
65
  "configSchema": "config.d.ts"
66
66
  }
@@ -1,326 +0,0 @@
1
- 'use strict';
2
-
3
- var pluginSearchBackendNode = require('@backstage/plugin-search-backend-node');
4
- var backendPluginApi = require('@backstage/backend-plugin-api');
5
- var uuid = require('uuid');
6
-
7
- async function queryPostgresMajorVersion(knex) {
8
- if (knex.client.config.client !== "pg") {
9
- throw new Error("Can't resolve version, not a postgres database");
10
- }
11
- const { rows } = await knex.raw("SHOW server_version_num");
12
- const [result] = rows;
13
- const version = +result.server_version_num;
14
- const majorVersion = Math.floor(version / 1e4);
15
- return majorVersion;
16
- }
17
-
18
- const migrationsDir = backendPluginApi.resolvePackagePath(
19
- "@backstage/plugin-search-backend-module-pg",
20
- "migrations"
21
- );
22
- class DatabaseDocumentStore {
23
- constructor(db) {
24
- this.db = db;
25
- }
26
- static async create(database) {
27
- const knex = await database.getClient();
28
- try {
29
- const majorVersion = await queryPostgresMajorVersion(knex);
30
- if (majorVersion < 12) {
31
- throw new Error(
32
- `The PgSearchEngine requires at least postgres version 12 (but is running on ${majorVersion})`
33
- );
34
- }
35
- } catch {
36
- throw new Error(
37
- "The PgSearchEngine is only supported when using a postgres database (>=12.x)"
38
- );
39
- }
40
- if (!database.migrations?.skip) {
41
- await knex.migrate.latest({
42
- directory: migrationsDir
43
- });
44
- }
45
- return new DatabaseDocumentStore(knex);
46
- }
47
- static async supported(knex) {
48
- try {
49
- const majorVersion = await queryPostgresMajorVersion(knex);
50
- return majorVersion >= 12;
51
- } catch {
52
- return false;
53
- }
54
- }
55
- async transaction(fn) {
56
- return await this.db.transaction(fn);
57
- }
58
- async getTransaction() {
59
- return this.db.transaction();
60
- }
61
- async prepareInsert(tx) {
62
- await tx.raw(
63
- "CREATE TEMP TABLE documents_to_insert (type text NOT NULL, document jsonb NOT NULL, hash bytea NOT NULL GENERATED ALWAYS AS (sha256(replace(document::text || type, '\\', '\\\\')::bytea)) STORED) ON COMMIT DROP"
64
- );
65
- }
66
- async completeInsert(tx, type) {
67
- await tx.insert(
68
- tx("documents_to_insert").select(
69
- "type",
70
- "document",
71
- "hash"
72
- )
73
- ).into(tx.raw("documents (type, document, hash)")).onConflict("hash").ignore();
74
- const rowsToDelete = tx("documents").select("documents.hash").leftJoin("documents_to_insert", {
75
- "documents.hash": "documents_to_insert.hash"
76
- }).whereNull("documents_to_insert.hash");
77
- await tx("documents").where({ type }).whereIn("hash", rowsToDelete).delete();
78
- }
79
- async insertDocuments(tx, type, documents) {
80
- await tx("documents_to_insert").insert(
81
- documents.map((document) => ({
82
- type,
83
- document
84
- }))
85
- );
86
- }
87
- async query(tx, searchQuery) {
88
- const { types, pgTerm, fields, offset, limit, options } = searchQuery;
89
- const query = tx("documents");
90
- if (pgTerm) {
91
- query.from(tx.raw("documents, to_tsquery('english', ?) query", pgTerm)).whereRaw("query @@ body");
92
- } else {
93
- query.from("documents");
94
- }
95
- if (types) {
96
- query.whereIn("type", types);
97
- }
98
- if (fields) {
99
- Object.keys(fields).forEach((key) => {
100
- const value = fields[key];
101
- const valueArray = Array.isArray(value) ? value : [value];
102
- const fieldValueCompare = valueArray.map((v) => ({ [key]: v })).map((v) => JSON.stringify(v));
103
- const arrayValueCompare = valueArray.map((v) => ({ [key]: [v] })).map((v) => JSON.stringify(v));
104
- const valueCompare = [...fieldValueCompare, ...arrayValueCompare];
105
- query.whereRaw(
106
- `(${valueCompare.map(() => "document @> ?").join(" OR ")})`,
107
- valueCompare
108
- );
109
- });
110
- }
111
- query.select("type", "document");
112
- if (pgTerm && options.useHighlight) {
113
- const headlineOptions = `MaxWords=${options.maxWords}, MinWords=${options.minWords}, ShortWord=${options.shortWord}, HighlightAll=${options.highlightAll}, MaxFragments=${options.maxFragments}, FragmentDelimiter=${options.fragmentDelimiter}, StartSel=${options.preTag}, StopSel=${options.postTag}`;
114
- query.select(tx.raw('ts_rank_cd(body, query) AS "rank"')).select(
115
- tx.raw(
116
- `ts_headline('english', document, query, '${headlineOptions}') as "highlight"`
117
- )
118
- ).orderBy("rank", "desc");
119
- } else if (pgTerm && !options.useHighlight) {
120
- query.select(tx.raw('ts_rank_cd(body, query) AS "rank"')).orderBy("rank", "desc");
121
- } else {
122
- query.select(tx.raw("1 as rank"));
123
- }
124
- return await query.offset(offset).limit(limit);
125
- }
126
- }
127
-
128
- class PgSearchEngineIndexer extends pluginSearchBackendNode.BatchSearchEngineIndexer {
129
- logger;
130
- store;
131
- type;
132
- tx;
133
- numRecords = 0;
134
- constructor(options) {
135
- super({ batchSize: options.batchSize });
136
- this.store = options.databaseStore;
137
- this.type = options.type;
138
- this.logger = options.logger;
139
- }
140
- async initialize() {
141
- this.tx = await this.store.getTransaction();
142
- try {
143
- await this.store.prepareInsert(this.tx);
144
- } catch (e) {
145
- this.tx.rollback(e);
146
- throw e;
147
- }
148
- }
149
- async index(documents) {
150
- this.numRecords += documents.length;
151
- const refs = [...new Set(documents.map((d) => d.authorization?.resourceRef))];
152
- this.logger?.debug(
153
- `Attempting to index the following entities: ${refs.toString()}`
154
- );
155
- try {
156
- await this.store.insertDocuments(this.tx, this.type, documents);
157
- } catch (e) {
158
- this.tx.rollback(e);
159
- throw e;
160
- }
161
- }
162
- async finalize() {
163
- if (this.numRecords === 0) {
164
- this.logger?.warn(
165
- `Index for ${this.type} was not replaced: indexer received 0 documents`
166
- );
167
- this.tx.rollback();
168
- return;
169
- }
170
- try {
171
- await this.store.completeInsert(this.tx, this.type);
172
- this.tx.commit();
173
- } catch (e) {
174
- this.tx.rollback(e);
175
- throw e;
176
- }
177
- }
178
- /**
179
- * Custom handler covering the case where an error occurred somewhere else in
180
- * the indexing pipeline (e.g. a collator or decorator). In such cases, the
181
- * finalize method is not called, which leaves a dangling transaction and
182
- * therefore an open connection to PG. This handler ensures we close the
183
- * transaction and associated connection.
184
- *
185
- * todo(@backstage/search-maintainers): Consider introducing a more
186
- * formal mechanism for handling such errors in BatchSearchEngineIndexer and
187
- * replacing this method with it. See: #17291
188
- *
189
- * @internal
190
- */
191
- async _destroy(error, done) {
192
- if (!error) {
193
- done();
194
- return;
195
- }
196
- if (!this.tx.isCompleted()) {
197
- await this.tx.rollback(error);
198
- }
199
- done(error);
200
- }
201
- }
202
-
203
- class PgSearchEngine {
204
- /**
205
- * @deprecated This will be marked as private in a future release, please us fromConfig instead
206
- */
207
- constructor(databaseStore, config, logger) {
208
- this.databaseStore = databaseStore;
209
- const uuidTag = uuid.v4();
210
- const highlightConfig = config.getOptionalConfig(
211
- "search.pg.highlightOptions"
212
- );
213
- const highlightOptions = {
214
- preTag: `<${uuidTag}>`,
215
- postTag: `</${uuidTag}>`,
216
- useHighlight: highlightConfig?.getOptionalBoolean("useHighlight") ?? true,
217
- maxWords: highlightConfig?.getOptionalNumber("maxWords") ?? 35,
218
- minWords: highlightConfig?.getOptionalNumber("minWords") ?? 15,
219
- shortWord: highlightConfig?.getOptionalNumber("shortWord") ?? 3,
220
- highlightAll: highlightConfig?.getOptionalBoolean("highlightAll") ?? false,
221
- maxFragments: highlightConfig?.getOptionalNumber("maxFragments") ?? 0,
222
- fragmentDelimiter: highlightConfig?.getOptionalString("fragmentDelimiter") ?? " ... "
223
- };
224
- this.highlightOptions = highlightOptions;
225
- this.indexerBatchSize = config.getOptionalNumber("search.pg.indexerBatchSize") ?? 1e3;
226
- this.logger = logger;
227
- }
228
- logger;
229
- highlightOptions;
230
- indexerBatchSize;
231
- /**
232
- * @deprecated This will be removed in a future release, please use fromConfig instead
233
- */
234
- static async from(options) {
235
- return new PgSearchEngine(
236
- await DatabaseDocumentStore.create(options.database),
237
- options.config,
238
- options.logger
239
- );
240
- }
241
- static async fromConfig(config, options) {
242
- return new PgSearchEngine(
243
- await DatabaseDocumentStore.create(options.database),
244
- config,
245
- options.logger
246
- );
247
- }
248
- static async supported(database) {
249
- return await DatabaseDocumentStore.supported(await database.getClient());
250
- }
251
- translator(query, options) {
252
- const pageSize = query.pageLimit || 25;
253
- const { page } = decodePageCursor(query.pageCursor);
254
- const offset = page * pageSize;
255
- const limit = pageSize + 1;
256
- return {
257
- pgQuery: {
258
- pgTerm: query.term.split(/\s/).map((p) => p.replace(/[\0()|&:*!]/g, "").trim()).filter((p) => p !== "").map((p) => `(${JSON.stringify(p)} | ${JSON.stringify(p)}:*)`).join("&"),
259
- fields: query.filters,
260
- types: query.types,
261
- offset,
262
- limit,
263
- options: options.highlightOptions
264
- },
265
- pageSize
266
- };
267
- }
268
- setTranslator(translator) {
269
- this.translator = translator;
270
- }
271
- async getIndexer(type) {
272
- return new PgSearchEngineIndexer({
273
- batchSize: this.indexerBatchSize,
274
- type,
275
- databaseStore: this.databaseStore,
276
- logger: this.logger?.child({ documentType: type })
277
- });
278
- }
279
- async query(query) {
280
- const { pgQuery, pageSize } = this.translator(query, {
281
- highlightOptions: this.highlightOptions
282
- });
283
- const rows = await this.databaseStore.transaction(
284
- async (tx) => this.databaseStore.query(tx, pgQuery)
285
- );
286
- const { page } = decodePageCursor(query.pageCursor);
287
- const hasNextPage = rows.length > pageSize;
288
- const hasPreviousPage = page > 0;
289
- const pageRows = rows.slice(0, pageSize);
290
- const nextPageCursor = hasNextPage ? encodePageCursor({ page: page + 1 }) : void 0;
291
- const previousPageCursor = hasPreviousPage ? encodePageCursor({ page: page - 1 }) : void 0;
292
- const results = pageRows.map(
293
- ({ type, document, highlight }, index) => ({
294
- type,
295
- document,
296
- rank: page * pageSize + index + 1,
297
- highlight: {
298
- preTag: pgQuery.options.preTag,
299
- postTag: pgQuery.options.postTag,
300
- fields: highlight ? {
301
- text: highlight.text,
302
- title: highlight.title,
303
- location: highlight.location,
304
- path: ""
305
- } : {}
306
- }
307
- })
308
- );
309
- return { results, nextPageCursor, previousPageCursor };
310
- }
311
- }
312
- function decodePageCursor(pageCursor) {
313
- if (!pageCursor) {
314
- return { page: 0 };
315
- }
316
- return {
317
- page: Number(Buffer.from(pageCursor, "base64").toString("utf-8"))
318
- };
319
- }
320
- function encodePageCursor({ page }) {
321
- return Buffer.from(`${page}`, "utf-8").toString("base64");
322
- }
323
-
324
- exports.DatabaseDocumentStore = DatabaseDocumentStore;
325
- exports.PgSearchEngine = PgSearchEngine;
326
- //# sourceMappingURL=PgSearchEngine-CyA2vr4y.cjs.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"PgSearchEngine-CyA2vr4y.cjs.js","sources":["../../src/database/util.ts","../../src/database/DatabaseDocumentStore.ts","../../src/PgSearchEngine/PgSearchEngineIndexer.ts","../../src/PgSearchEngine/PgSearchEngine.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Knex } from 'knex';\n\nexport async function queryPostgresMajorVersion(knex: Knex): Promise<number> {\n if (knex.client.config.client !== 'pg') {\n throw new Error(\"Can't resolve version, not a postgres database\");\n }\n\n const { rows } = await knex.raw('SHOW server_version_num');\n const [result] = rows;\n const version = +result.server_version_num;\n const majorVersion = Math.floor(version / 10000);\n return majorVersion;\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n DatabaseService,\n resolvePackagePath,\n} 
from '@backstage/backend-plugin-api';\nimport { IndexableDocument } from '@backstage/plugin-search-common';\nimport { Knex } from 'knex';\nimport {\n DatabaseStore,\n DocumentResultRow,\n PgSearchQuery,\n RawDocumentRow,\n} from './types';\nimport { queryPostgresMajorVersion } from './util';\n\nconst migrationsDir = resolvePackagePath(\n '@backstage/plugin-search-backend-module-pg',\n 'migrations',\n);\n\n/** @public */\nexport class DatabaseDocumentStore implements DatabaseStore {\n static async create(\n database: DatabaseService,\n ): Promise<DatabaseDocumentStore> {\n const knex = await database.getClient();\n try {\n const majorVersion = await queryPostgresMajorVersion(knex);\n\n if (majorVersion < 12) {\n // We are using some features (like generated columns) that aren't\n // available in older postgres versions.\n throw new Error(\n `The PgSearchEngine requires at least postgres version 12 (but is running on ${majorVersion})`,\n );\n }\n } catch {\n // Actually both mysql and sqlite have a full text search, too. 
We could\n // implement them separately or add them here.\n throw new Error(\n 'The PgSearchEngine is only supported when using a postgres database (>=12.x)',\n );\n }\n\n if (!database.migrations?.skip) {\n await knex.migrate.latest({\n directory: migrationsDir,\n });\n }\n\n return new DatabaseDocumentStore(knex);\n }\n\n static async supported(knex: Knex): Promise<boolean> {\n try {\n const majorVersion = await queryPostgresMajorVersion(knex);\n\n return majorVersion >= 12;\n } catch {\n return false;\n }\n }\n\n constructor(private readonly db: Knex) {}\n\n async transaction<T>(fn: (tx: Knex.Transaction) => Promise<T>): Promise<T> {\n return await this.db.transaction(fn);\n }\n\n async getTransaction(): Promise<Knex.Transaction> {\n return this.db.transaction();\n }\n\n async prepareInsert(tx: Knex.Transaction): Promise<void> {\n // We create a temporary table to collect the hashes of the documents that\n // we expect to be in the documents table at the end. The table is deleted\n // at the end of the transaction.\n // The hash makes sure that we generate a new row for every change.\n await tx.raw(\n 'CREATE TEMP TABLE documents_to_insert (' +\n 'type text NOT NULL, ' +\n 'document jsonb NOT NULL, ' +\n // Generating the hash requires a trick, as the text to bytea\n // conversation runs into errors in case the text contains a backslash.\n // Therefore we have to escape them.\n \"hash bytea NOT NULL GENERATED ALWAYS AS (sha256(replace(document::text || type, '\\\\', '\\\\\\\\')::bytea)) STORED\" +\n ') ON COMMIT DROP',\n );\n }\n\n async completeInsert(tx: Knex.Transaction, type: string): Promise<void> {\n // Copy all new rows into the documents table\n await tx\n .insert(\n tx<RawDocumentRow>('documents_to_insert').select(\n 'type',\n 'document',\n 'hash',\n ),\n )\n .into(tx.raw('documents (type, document, hash)'))\n .onConflict('hash')\n .ignore();\n\n // Delete all documents that we don't expect (deleted and changed)\n const rowsToDelete = 
tx<RawDocumentRow>('documents')\n .select('documents.hash')\n .leftJoin<RawDocumentRow>('documents_to_insert', {\n 'documents.hash': 'documents_to_insert.hash',\n })\n .whereNull('documents_to_insert.hash');\n\n await tx<RawDocumentRow>('documents')\n .where({ type })\n .whereIn('hash', rowsToDelete)\n .delete();\n }\n\n async insertDocuments(\n tx: Knex.Transaction,\n type: string,\n documents: IndexableDocument[],\n ): Promise<void> {\n // Insert all documents into the temporary table to process them later\n await tx<DocumentResultRow>('documents_to_insert').insert(\n documents.map(document => ({\n type,\n document,\n })),\n );\n }\n\n async query(\n tx: Knex.Transaction,\n searchQuery: PgSearchQuery,\n ): Promise<DocumentResultRow[]> {\n const { types, pgTerm, fields, offset, limit, options } = searchQuery;\n // TODO(awanlin): We should make the language a parameter so that we can support more then just english\n // Builds a query like:\n // SELECT ts_rank_cd(body, query) AS rank, type, document,\n // ts_headline('english', document, query) AS highlight\n // FROM documents, to_tsquery('english', 'consent') query\n // WHERE query @@ body AND (document @> '{\"kind\": \"API\"}')\n // ORDER BY rank DESC\n // LIMIT 10;\n const query = tx<DocumentResultRow>('documents');\n\n if (pgTerm) {\n query\n .from(tx.raw(\"documents, to_tsquery('english', ?) query\", pgTerm))\n .whereRaw('query @@ body');\n } else {\n query.from('documents');\n }\n\n if (types) {\n query.whereIn('type', types);\n }\n\n if (fields) {\n Object.keys(fields).forEach(key => {\n const value = fields[key];\n const valueArray = Array.isArray(value) ? 
value : [value];\n const fieldValueCompare = valueArray\n .map(v => ({ [key]: v }))\n .map(v => JSON.stringify(v));\n const arrayValueCompare = valueArray\n .map(v => ({ [key]: [v] }))\n .map(v => JSON.stringify(v));\n const valueCompare = [...fieldValueCompare, ...arrayValueCompare];\n query.whereRaw(\n `(${valueCompare.map(() => 'document @> ?').join(' OR ')})`,\n valueCompare,\n );\n });\n }\n\n query.select('type', 'document');\n\n if (pgTerm && options.useHighlight) {\n const headlineOptions = `MaxWords=${options.maxWords}, MinWords=${options.minWords}, ShortWord=${options.shortWord}, HighlightAll=${options.highlightAll}, MaxFragments=${options.maxFragments}, FragmentDelimiter=${options.fragmentDelimiter}, StartSel=${options.preTag}, StopSel=${options.postTag}`;\n query\n .select(tx.raw('ts_rank_cd(body, query) AS \"rank\"'))\n .select(\n tx.raw(\n `ts_headline(\\'english\\', document, query, '${headlineOptions}') as \"highlight\"`,\n ),\n )\n .orderBy('rank', 'desc');\n } else if (pgTerm && !options.useHighlight) {\n query\n .select(tx.raw('ts_rank_cd(body, query) AS \"rank\"'))\n .orderBy('rank', 'desc');\n } else {\n query.select(tx.raw('1 as rank'));\n }\n\n return await query.offset(offset).limit(limit);\n }\n}\n","/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { BatchSearchEngineIndexer } from '@backstage/plugin-search-backend-node';\nimport { IndexableDocument } from 
'@backstage/plugin-search-common';\nimport { Knex } from 'knex';\nimport { DatabaseStore } from '../database';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n/** @public */\nexport type PgSearchEngineIndexerOptions = {\n batchSize: number;\n type: string;\n databaseStore: DatabaseStore;\n logger?: LoggerService;\n};\n\n/** @public */\nexport class PgSearchEngineIndexer extends BatchSearchEngineIndexer {\n private logger?: LoggerService;\n private store: DatabaseStore;\n private type: string;\n private tx: Knex.Transaction | undefined;\n private numRecords = 0;\n\n constructor(options: PgSearchEngineIndexerOptions) {\n super({ batchSize: options.batchSize });\n this.store = options.databaseStore;\n this.type = options.type;\n this.logger = options.logger;\n }\n\n async initialize(): Promise<void> {\n this.tx = await this.store.getTransaction();\n try {\n await this.store.prepareInsert(this.tx);\n } catch (e) {\n // In case of error, rollback the transaction and re-throw the error so\n // that the stream can be closed and destroyed properly.\n this.tx.rollback(e);\n throw e;\n }\n }\n\n async index(documents: IndexableDocument[]): Promise<void> {\n this.numRecords += documents.length;\n\n const refs = [...new Set(documents.map(d => d.authorization?.resourceRef))];\n this.logger?.debug(\n `Attempting to index the following entities: ${refs.toString()}`,\n );\n\n try {\n await this.store.insertDocuments(this.tx!, this.type, documents);\n } catch (e) {\n // In case of error, rollback the transaction and re-throw the error so\n // that the stream can be closed and destroyed properly.\n this.tx!.rollback(e);\n throw e;\n }\n }\n\n async finalize(): Promise<void> {\n // If no documents were indexed, rollback the transaction, log a warning,\n // and do not continue. 
This ensures that collators that return empty sets\n // of documents do not cause the index to be deleted.\n if (this.numRecords === 0) {\n this.logger?.warn(\n `Index for ${this.type} was not replaced: indexer received 0 documents`,\n );\n this.tx!.rollback!();\n return;\n }\n\n // Attempt to complete and commit the transaction.\n try {\n await this.store.completeInsert(this.tx!, this.type);\n this.tx!.commit();\n } catch (e) {\n // Otherwise, rollback the transaction and re-throw the error so that the\n // stream can be closed and destroyed properly.\n this.tx!.rollback!(e);\n throw e;\n }\n }\n\n /**\n * Custom handler covering the case where an error occurred somewhere else in\n * the indexing pipeline (e.g. a collator or decorator). In such cases, the\n * finalize method is not called, which leaves a dangling transaction and\n * therefore an open connection to PG. This handler ensures we close the\n * transaction and associated connection.\n *\n * todo(@backstage/search-maintainers): Consider introducing a more\n * formal mechanism for handling such errors in BatchSearchEngineIndexer and\n * replacing this method with it. 
See: #17291\n *\n * @internal\n */\n async _destroy(error: Error | null, done: (error?: Error | null) => void) {\n // Ignore situations where there was no error.\n if (!error) {\n done();\n return;\n }\n\n if (!this.tx!.isCompleted()) {\n await this.tx!.rollback(error);\n }\n\n done(error);\n }\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SearchEngine } from '@backstage/plugin-search-backend-node';\nimport {\n SearchQuery,\n IndexableResultSet,\n IndexableResult,\n} from '@backstage/plugin-search-common';\nimport { PgSearchEngineIndexer } from './PgSearchEngineIndexer';\nimport {\n DatabaseDocumentStore,\n DatabaseStore,\n PgSearchQuery,\n} from '../database';\nimport { v4 as uuid } from 'uuid';\nimport { Config } from '@backstage/config';\nimport { DatabaseService, LoggerService } from '@backstage/backend-plugin-api';\n\n/**\n * Search query that the Postgres search engine understands.\n * @public\n */\nexport type ConcretePgSearchQuery = {\n pgQuery: PgSearchQuery;\n pageSize: number;\n};\n\n/**\n * Options available for the Postgres specific query translator.\n * @public\n */\nexport type PgSearchQueryTranslatorOptions = {\n highlightOptions: PgSearchHighlightOptions;\n};\n\n/**\n * Postgres specific query translator.\n * @public\n */\nexport type PgSearchQueryTranslator = (\n query: SearchQuery,\n options: PgSearchQueryTranslatorOptions,\n) => ConcretePgSearchQuery;\n\n/**\n * 
Options to instantiate PgSearchEngine\n * @public\n */\nexport type PgSearchOptions = {\n database: DatabaseService;\n logger?: LoggerService;\n};\n\n/**\n * Options for highlighting search terms\n * @public\n */\nexport type PgSearchHighlightOptions = {\n useHighlight?: boolean;\n maxWords?: number;\n minWords?: number;\n shortWord?: number;\n highlightAll?: boolean;\n maxFragments?: number;\n fragmentDelimiter?: string;\n preTag: string;\n postTag: string;\n};\n\n/** @public */\nexport class PgSearchEngine implements SearchEngine {\n private readonly logger?: LoggerService;\n private readonly highlightOptions: PgSearchHighlightOptions;\n private readonly indexerBatchSize: number;\n\n /**\n * @deprecated This will be marked as private in a future release, please us fromConfig instead\n */\n constructor(\n private readonly databaseStore: DatabaseStore,\n config: Config,\n logger?: LoggerService,\n ) {\n const uuidTag = uuid();\n const highlightConfig = config.getOptionalConfig(\n 'search.pg.highlightOptions',\n );\n\n const highlightOptions: PgSearchHighlightOptions = {\n preTag: `<${uuidTag}>`,\n postTag: `</${uuidTag}>`,\n useHighlight: highlightConfig?.getOptionalBoolean('useHighlight') ?? true,\n maxWords: highlightConfig?.getOptionalNumber('maxWords') ?? 35,\n minWords: highlightConfig?.getOptionalNumber('minWords') ?? 15,\n shortWord: highlightConfig?.getOptionalNumber('shortWord') ?? 3,\n highlightAll:\n highlightConfig?.getOptionalBoolean('highlightAll') ?? false,\n maxFragments: highlightConfig?.getOptionalNumber('maxFragments') ?? 0,\n fragmentDelimiter:\n highlightConfig?.getOptionalString('fragmentDelimiter') ?? ' ... ',\n };\n this.highlightOptions = highlightOptions;\n this.indexerBatchSize =\n config.getOptionalNumber('search.pg.indexerBatchSize') ?? 
1000;\n this.logger = logger;\n }\n\n /**\n * @deprecated This will be removed in a future release, please use fromConfig instead\n */\n static async from(options: {\n database: DatabaseService;\n config: Config;\n logger?: LoggerService;\n }): Promise<PgSearchEngine> {\n return new PgSearchEngine(\n await DatabaseDocumentStore.create(options.database),\n options.config,\n options.logger,\n );\n }\n\n static async fromConfig(config: Config, options: PgSearchOptions) {\n return new PgSearchEngine(\n await DatabaseDocumentStore.create(options.database),\n config,\n options.logger,\n );\n }\n\n static async supported(database: DatabaseService): Promise<boolean> {\n return await DatabaseDocumentStore.supported(await database.getClient());\n }\n\n translator(\n query: SearchQuery,\n options: PgSearchQueryTranslatorOptions,\n ): ConcretePgSearchQuery {\n const pageSize = query.pageLimit || 25;\n const { page } = decodePageCursor(query.pageCursor);\n const offset = page * pageSize;\n // We request more result to know whether there is another page\n const limit = pageSize + 1;\n\n return {\n pgQuery: {\n pgTerm: query.term\n .split(/\\s/)\n .map(p => p.replace(/[\\0()|&:*!]/g, '').trim())\n .filter(p => p !== '')\n .map(p => `(${JSON.stringify(p)} | ${JSON.stringify(p)}:*)`)\n .join('&'),\n fields: query.filters as Record<string, string | string[]>,\n types: query.types,\n offset,\n limit,\n options: options.highlightOptions,\n },\n pageSize,\n };\n }\n\n setTranslator(translator: PgSearchQueryTranslator) {\n this.translator = translator;\n }\n\n async getIndexer(type: string) {\n return new PgSearchEngineIndexer({\n batchSize: this.indexerBatchSize,\n type,\n databaseStore: this.databaseStore,\n logger: this.logger?.child({ documentType: type }),\n });\n }\n\n async query(query: SearchQuery): Promise<IndexableResultSet> {\n const { pgQuery, pageSize } = this.translator(query, {\n highlightOptions: this.highlightOptions,\n });\n\n const rows = await 
this.databaseStore.transaction(async tx =>\n this.databaseStore.query(tx, pgQuery),\n );\n\n // We requested one result more than the page size to know whether there is\n // another page.\n const { page } = decodePageCursor(query.pageCursor);\n const hasNextPage = rows.length > pageSize;\n const hasPreviousPage = page > 0;\n const pageRows = rows.slice(0, pageSize);\n const nextPageCursor = hasNextPage\n ? encodePageCursor({ page: page + 1 })\n : undefined;\n const previousPageCursor = hasPreviousPage\n ? encodePageCursor({ page: page - 1 })\n : undefined;\n\n const results = pageRows.map(\n ({ type, document, highlight }, index): IndexableResult => ({\n type,\n document,\n rank: page * pageSize + index + 1,\n highlight: {\n preTag: pgQuery.options.preTag,\n postTag: pgQuery.options.postTag,\n fields: highlight\n ? {\n text: highlight.text,\n title: highlight.title,\n location: highlight.location,\n path: '',\n }\n : {},\n },\n }),\n );\n\n return { results, nextPageCursor, previousPageCursor };\n }\n}\n\nexport function decodePageCursor(pageCursor?: string): { page: number } {\n if (!pageCursor) {\n return { page: 0 };\n }\n\n return {\n page: Number(Buffer.from(pageCursor, 'base64').toString('utf-8')),\n };\n}\n\nexport function encodePageCursor({ page }: { page: number }): string {\n return Buffer.from(`${page}`, 
'utf-8').toString('base64');\n}\n"],"names":["resolvePackagePath","BatchSearchEngineIndexer","uuid"],"mappings":";;;;;;AAiBA,eAAsB,0BAA0B,IAA6B,EAAA;AAC3E,EAAA,IAAI,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA,KAAW,IAAM,EAAA;AACtC,IAAM,MAAA,IAAI,MAAM,gDAAgD,CAAA,CAAA;AAAA,GAClE;AAEA,EAAA,MAAM,EAAE,IAAK,EAAA,GAAI,MAAM,IAAA,CAAK,IAAI,yBAAyB,CAAA,CAAA;AACzD,EAAM,MAAA,CAAC,MAAM,CAAI,GAAA,IAAA,CAAA;AACjB,EAAM,MAAA,OAAA,GAAU,CAAC,MAAO,CAAA,kBAAA,CAAA;AACxB,EAAA,MAAM,YAAe,GAAA,IAAA,CAAK,KAAM,CAAA,OAAA,GAAU,GAAK,CAAA,CAAA;AAC/C,EAAO,OAAA,YAAA,CAAA;AACT;;ACEA,MAAM,aAAgB,GAAAA,mCAAA;AAAA,EACpB,4CAAA;AAAA,EACA,YAAA;AACF,CAAA,CAAA;AAGO,MAAM,qBAA+C,CAAA;AAAA,EA0C1D,YAA6B,EAAU,EAAA;AAAV,IAAA,IAAA,CAAA,EAAA,GAAA,EAAA,CAAA;AAAA,GAAW;AAAA,EAzCxC,aAAa,OACX,QACgC,EAAA;AAChC,IAAM,MAAA,IAAA,GAAO,MAAM,QAAA,CAAS,SAAU,EAAA,CAAA;AACtC,IAAI,IAAA;AACF,MAAM,MAAA,YAAA,GAAe,MAAM,yBAAA,CAA0B,IAAI,CAAA,CAAA;AAEzD,MAAA,IAAI,eAAe,EAAI,EAAA;AAGrB,QAAA,MAAM,IAAI,KAAA;AAAA,UACR,+EAA+E,YAAY,CAAA,CAAA,CAAA;AAAA,SAC7F,CAAA;AAAA,OACF;AAAA,KACM,CAAA,MAAA;AAGN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,8EAAA;AAAA,OACF,CAAA;AAAA,KACF;AAEA,IAAI,IAAA,CAAC,QAAS,CAAA,UAAA,EAAY,IAAM,EAAA;AAC9B,MAAM,MAAA,IAAA,CAAK,QAAQ,MAAO,CAAA;AAAA,QACxB,SAAW,EAAA,aAAA;AAAA,OACZ,CAAA,CAAA;AAAA,KACH;AAEA,IAAO,OAAA,IAAI,sBAAsB,IAAI,CAAA,CAAA;AAAA,GACvC;AAAA,EAEA,aAAa,UAAU,IAA8B,EAAA;AACnD,IAAI,IAAA;AACF,MAAM,MAAA,YAAA,GAAe,MAAM,yBAAA,CAA0B,IAAI,CAAA,CAAA;AAEzD,MAAA,OAAO,YAAgB,IAAA,EAAA,CAAA;AAAA,KACjB,CAAA,MAAA;AACN,MAAO,OAAA,KAAA,CAAA;AAAA,KACT;AAAA,GACF;AAAA,EAIA,MAAM,YAAe,EAAsD,EAAA;AACzE,IAAA,OAAO,MAAM,IAAA,CAAK,EAAG,CAAA,WAAA,CAAY,EAAE,CAAA,CAAA;AAAA,GACrC;AAAA,EAEA,MAAM,cAA4C,GAAA;AAChD,IAAO,OAAA,IAAA,CAAK,GAAG,WAAY,EAAA,CAAA;AAAA,GAC7B;AAAA,EAEA,MAAM,cAAc,EAAqC,EAAA;AAKvD,IAAA,MAAM,EAAG,CAAA,GAAA;AAAA,MACP,mNAAA;AAAA,KAQF,CAAA;AAAA,GACF;AAAA,EAEA,MAAM,cAAe,CAAA,EAAA,EAAsB,IAA6B,EAAA;AAEtE,IAAA,MAAM,EACH,CAAA,MAAA;AAAA,MACC,EAAA,CAAmB,qBAAqB,CAAE,CAAA,MAAA;AAAA,QACxC,MAAA;AAAA,QACA,UAAA;AAAA,QACA,MAAA;AAAA,OACF;AAAA,KACF,CACC,IAAK,CAAA,EAAA,CAAG,GA
AI,CAAA,kCAAkC,CAAC,CAC/C,CAAA,UAAA,CAAW,MAAM,CAAA,CACjB,MAAO,EAAA,CAAA;AAGV,IAAM,MAAA,YAAA,GAAe,GAAmB,WAAW,CAAA,CAChD,OAAO,gBAAgB,CAAA,CACvB,SAAyB,qBAAuB,EAAA;AAAA,MAC/C,gBAAkB,EAAA,0BAAA;AAAA,KACnB,CACA,CAAA,SAAA,CAAU,0BAA0B,CAAA,CAAA;AAEvC,IAAA,MAAM,EAAmB,CAAA,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,IAAA,EAAM,CAAA,CACd,OAAQ,CAAA,MAAA,EAAQ,YAAY,CAAA,CAC5B,MAAO,EAAA,CAAA;AAAA,GACZ;AAAA,EAEA,MAAM,eAAA,CACJ,EACA,EAAA,IAAA,EACA,SACe,EAAA;AAEf,IAAM,MAAA,EAAA,CAAsB,qBAAqB,CAAE,CAAA,MAAA;AAAA,MACjD,SAAA,CAAU,IAAI,CAAa,QAAA,MAAA;AAAA,QACzB,IAAA;AAAA,QACA,QAAA;AAAA,OACA,CAAA,CAAA;AAAA,KACJ,CAAA;AAAA,GACF;AAAA,EAEA,MAAM,KACJ,CAAA,EAAA,EACA,WAC8B,EAAA;AAC9B,IAAA,MAAM,EAAE,KAAO,EAAA,MAAA,EAAQ,QAAQ,MAAQ,EAAA,KAAA,EAAO,SAAY,GAAA,WAAA,CAAA;AAS1D,IAAM,MAAA,KAAA,GAAQ,GAAsB,WAAW,CAAA,CAAA;AAE/C,IAAA,IAAI,MAAQ,EAAA;AACV,MACG,KAAA,CAAA,IAAA,CAAK,GAAG,GAAI,CAAA,2CAAA,EAA6C,MAAM,CAAC,CAAA,CAChE,SAAS,eAAe,CAAA,CAAA;AAAA,KACtB,MAAA;AACL,MAAA,KAAA,CAAM,KAAK,WAAW,CAAA,CAAA;AAAA,KACxB;AAEA,IAAA,IAAI,KAAO,EAAA;AACT,MAAM,KAAA,CAAA,OAAA,CAAQ,QAAQ,KAAK,CAAA,CAAA;AAAA,KAC7B;AAEA,IAAA,IAAI,MAAQ,EAAA;AACV,MAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAE,CAAA,OAAA,CAAQ,CAAO,GAAA,KAAA;AACjC,QAAM,MAAA,KAAA,GAAQ,OAAO,GAAG,CAAA,CAAA;AACxB,QAAA,MAAM,aAAa,KAAM,CAAA,OAAA,CAAQ,KAAK,CAAI,GAAA,KAAA,GAAQ,CAAC,KAAK,CAAA,CAAA;AACxD,QAAA,MAAM,oBAAoB,UACvB,CAAA,GAAA,CAAI,CAAM,CAAA,MAAA,EAAE,CAAC,GAAG,GAAG,CAAE,EAAA,CAAE,EACvB,GAAI,CAAA,CAAA,CAAA,KAAK,IAAK,CAAA,SAAA,CAAU,CAAC,CAAC,CAAA,CAAA;AAC7B,QAAA,MAAM,oBAAoB,UACvB,CAAA,GAAA,CAAI,QAAM,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,GAAI,CACzB,CAAA,GAAA,CAAI,OAAK,IAAK,CAAA,SAAA,CAAU,CAAC,CAAC,CAAA,CAAA;AAC7B,QAAA,MAAM,YAAe,GAAA,CAAC,GAAG,iBAAA,EAAmB,GAAG,iBAAiB,CAAA,CAAA;AAChE,QAAM,KAAA,CAAA,QAAA;AAAA,UACJ,CAAA,CAAA,EAAI,aAAa,GAAI,CAAA,MAAM,eAAe,CAAE,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,CAAA,CAAA;AAAA,UACxD,YAAA;AAAA,SACF,CAAA;AAAA,OACD,CAAA,CAAA;AAAA,KACH;AAEA,IAAM,KAAA,CAAA,MAAA,CAAO,QAAQ,UAAU,CAAA,CAAA;AAE/B,IAAI,IAAA,MAAA,IAAU,QAAQ,YAAc,EAAA;AAClC,MAAM,MAAA,eAAA,GAAkB,CAAY,SAAA,EAAA,OAAA,CAAQ,QAAQ,C
AAA,WAAA,EAAc,QAAQ,QAAQ,CAAA,YAAA,EAAe,OAAQ,CAAA,SAAS,CAAkB,eAAA,EAAA,OAAA,CAAQ,YAAY,CAAkB,eAAA,EAAA,OAAA,CAAQ,YAAY,CAAA,oBAAA,EAAuB,OAAQ,CAAA,iBAAiB,cAAc,OAAQ,CAAA,MAAM,CAAa,UAAA,EAAA,OAAA,CAAQ,OAAO,CAAA,CAAA,CAAA;AACtS,MAAA,KAAA,CACG,MAAO,CAAA,EAAA,CAAG,GAAI,CAAA,mCAAmC,CAAC,CAClD,CAAA,MAAA;AAAA,QACC,EAAG,CAAA,GAAA;AAAA,UACD,4CAA8C,eAAe,CAAA,iBAAA,CAAA;AAAA,SAC/D;AAAA,OACF,CACC,OAAQ,CAAA,MAAA,EAAQ,MAAM,CAAA,CAAA;AAAA,KAChB,MAAA,IAAA,MAAA,IAAU,CAAC,OAAA,CAAQ,YAAc,EAAA;AAC1C,MACG,KAAA,CAAA,MAAA,CAAO,GAAG,GAAI,CAAA,mCAAmC,CAAC,CAClD,CAAA,OAAA,CAAQ,QAAQ,MAAM,CAAA,CAAA;AAAA,KACpB,MAAA;AACL,MAAA,KAAA,CAAM,MAAO,CAAA,EAAA,CAAG,GAAI,CAAA,WAAW,CAAC,CAAA,CAAA;AAAA,KAClC;AAEA,IAAA,OAAO,MAAM,KAAM,CAAA,MAAA,CAAO,MAAM,CAAA,CAAE,MAAM,KAAK,CAAA,CAAA;AAAA,GAC/C;AACF;;ACtLO,MAAM,8BAA8BC,gDAAyB,CAAA;AAAA,EAC1D,MAAA,CAAA;AAAA,EACA,KAAA,CAAA;AAAA,EACA,IAAA,CAAA;AAAA,EACA,EAAA,CAAA;AAAA,EACA,UAAa,GAAA,CAAA,CAAA;AAAA,EAErB,YAAY,OAAuC,EAAA;AACjD,IAAA,KAAA,CAAM,EAAE,SAAA,EAAW,OAAQ,CAAA,SAAA,EAAW,CAAA,CAAA;AACtC,IAAA,IAAA,CAAK,QAAQ,OAAQ,CAAA,aAAA,CAAA;AACrB,IAAA,IAAA,CAAK,OAAO,OAAQ,CAAA,IAAA,CAAA;AACpB,IAAA,IAAA,CAAK,SAAS,OAAQ,CAAA,MAAA,CAAA;AAAA,GACxB;AAAA,EAEA,MAAM,UAA4B,GAAA;AAChC,IAAA,IAAA,CAAK,EAAK,GAAA,MAAM,IAAK,CAAA,KAAA,CAAM,cAAe,EAAA,CAAA;AAC1C,IAAI,IAAA;AACF,MAAA,MAAM,IAAK,CAAA,KAAA,CAAM,aAAc,CAAA,IAAA,CAAK,EAAE,CAAA,CAAA;AAAA,aAC/B,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAG,SAAS,CAAC,CAAA,CAAA;AAClB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA,EAEA,MAAM,MAAM,SAA+C,EAAA;AACzD,IAAA,IAAA,CAAK,cAAc,SAAU,CAAA,MAAA,CAAA;AAE7B,IAAA,MAAM,IAAO,GAAA,CAAC,GAAG,IAAI,GAAI,CAAA,SAAA,CAAU,GAAI,CAAA,CAAA,CAAA,KAAK,CAAE,CAAA,aAAA,EAAe,WAAW,CAAC,CAAC,CAAA,CAAA;AAC1E,IAAA,IAAA,CAAK,MAAQ,EAAA,KAAA;AAAA,MACX,CAAA,4CAAA,EAA+C,IAAK,CAAA,QAAA,EAAU,CAAA,CAAA;AAAA,KAChE,CAAA;AAEA,IAAI,IAAA;AACF,MAAA,MAAM,KAAK,KAAM,CAAA,eAAA,CAAgB,KAAK,EAAK,EAAA,IAAA,CAAK,MAAM,SAAS,CAAA,CAAA;AAAA,aACxD,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAI,SAAS,CAAC,CAAA,CAAA;AACnB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA,EAEA,MAAM,QAA0
B,GAAA;AAI9B,IAAI,IAAA,IAAA,CAAK,eAAe,CAAG,EAAA;AACzB,MAAA,IAAA,CAAK,MAAQ,EAAA,IAAA;AAAA,QACX,CAAA,UAAA,EAAa,KAAK,IAAI,CAAA,+CAAA,CAAA;AAAA,OACxB,CAAA;AACA,MAAA,IAAA,CAAK,GAAI,QAAU,EAAA,CAAA;AACnB,MAAA,OAAA;AAAA,KACF;AAGA,IAAI,IAAA;AACF,MAAA,MAAM,KAAK,KAAM,CAAA,cAAA,CAAe,IAAK,CAAA,EAAA,EAAK,KAAK,IAAI,CAAA,CAAA;AACnD,MAAA,IAAA,CAAK,GAAI,MAAO,EAAA,CAAA;AAAA,aACT,CAAG,EAAA;AAGV,MAAK,IAAA,CAAA,EAAA,CAAI,SAAU,CAAC,CAAA,CAAA;AACpB,MAAM,MAAA,CAAA,CAAA;AAAA,KACR;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,QAAS,CAAA,KAAA,EAAqB,IAAsC,EAAA;AAExE,IAAA,IAAI,CAAC,KAAO,EAAA;AACV,MAAK,IAAA,EAAA,CAAA;AACL,MAAA,OAAA;AAAA,KACF;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,EAAI,CAAA,WAAA,EAAe,EAAA;AAC3B,MAAM,MAAA,IAAA,CAAK,EAAI,CAAA,QAAA,CAAS,KAAK,CAAA,CAAA;AAAA,KAC/B;AAEA,IAAA,IAAA,CAAK,KAAK,CAAA,CAAA;AAAA,GACZ;AACF;;ACzCO,MAAM,cAAuC,CAAA;AAAA;AAAA;AAAA;AAAA,EAQlD,WAAA,CACmB,aACjB,EAAA,MAAA,EACA,MACA,EAAA;AAHiB,IAAA,IAAA,CAAA,aAAA,GAAA,aAAA,CAAA;AAIjB,IAAA,MAAM,UAAUC,OAAK,EAAA,CAAA;AACrB,IAAA,MAAM,kBAAkB,MAAO,CAAA,iBAAA;AAAA,MAC7B,4BAAA;AAAA,KACF,CAAA;AAEA,IAAA,MAAM,gBAA6C,GAAA;AAAA,MACjD,MAAA,EAAQ,IAAI,OAAO,CAAA,CAAA,CAAA;AAAA,MACnB,OAAA,EAAS,KAAK,OAAO,CAAA,CAAA,CAAA;AAAA,MACrB,YAAc,EAAA,eAAA,EAAiB,kBAAmB,CAAA,cAAc,CAAK,IAAA,IAAA;AAAA,MACrE,QAAU,EAAA,eAAA,EAAiB,iBAAkB,CAAA,UAAU,CAAK,IAAA,EAAA;AAAA,MAC5D,QAAU,EAAA,eAAA,EAAiB,iBAAkB,CAAA,UAAU,CAAK,IAAA,EAAA;AAAA,MAC5D,SAAW,EAAA,eAAA,EAAiB,iBAAkB,CAAA,WAAW,CAAK,IAAA,CAAA;AAAA,MAC9D,YACE,EAAA,eAAA,EAAiB,kBAAmB,CAAA,cAAc,CAAK,IAAA,KAAA;AAAA,MACzD,YAAc,EAAA,eAAA,EAAiB,iBAAkB,CAAA,cAAc,CAAK,IAAA,CAAA;AAAA,MACpE,iBACE,EAAA,eAAA,EAAiB,iBAAkB,CAAA,mBAAmB,CAAK,IAAA,OAAA;AAAA,KAC/D,CAAA;AACA,IAAA,IAAA,CAAK,gBAAmB,GAAA,gBAAA,CAAA;AACxB,IAAA,IAAA,CAAK,gBACH,GAAA,MAAA,CAAO,iBAAkB,CAAA,4BAA4B,CAAK,IAAA,GAAA,CAAA;AAC5D,IAAA,IAAA,CAAK,MAAS,GAAA,MAAA,CAAA;AAAA,GAChB;AAAA,EAlCiB,MAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA,EACA,gBAAA,CAAA;AAAA;AAAA;AAAA;AAAA,EAqCjB,aAAa,KAAK,OAIU,EAAA;AAC1B,IAAA,OAAO,IAAI,cAAA;AAAA,MACT,MAAM,qBAAA,C
AAsB,MAAO,CAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACnD,OAAQ,CAAA,MAAA;AAAA,MACR,OAAQ,CAAA,MAAA;AAAA,KACV,CAAA;AAAA,GACF;AAAA,EAEA,aAAa,UAAW,CAAA,MAAA,EAAgB,OAA0B,EAAA;AAChE,IAAA,OAAO,IAAI,cAAA;AAAA,MACT,MAAM,qBAAA,CAAsB,MAAO,CAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACnD,MAAA;AAAA,MACA,OAAQ,CAAA,MAAA;AAAA,KACV,CAAA;AAAA,GACF;AAAA,EAEA,aAAa,UAAU,QAA6C,EAAA;AAClE,IAAA,OAAO,MAAM,qBAAsB,CAAA,SAAA,CAAU,MAAM,QAAA,CAAS,WAAW,CAAA,CAAA;AAAA,GACzE;AAAA,EAEA,UAAA,CACE,OACA,OACuB,EAAA;AACvB,IAAM,MAAA,QAAA,GAAW,MAAM,SAAa,IAAA,EAAA,CAAA;AACpC,IAAA,MAAM,EAAE,IAAA,EAAS,GAAA,gBAAA,CAAiB,MAAM,UAAU,CAAA,CAAA;AAClD,IAAA,MAAM,SAAS,IAAO,GAAA,QAAA,CAAA;AAEtB,IAAA,MAAM,QAAQ,QAAW,GAAA,CAAA,CAAA;AAEzB,IAAO,OAAA;AAAA,MACL,OAAS,EAAA;AAAA,QACP,QAAQ,KAAM,CAAA,IAAA,CACX,KAAM,CAAA,IAAI,EACV,GAAI,CAAA,CAAA,CAAA,KAAK,CAAE,CAAA,OAAA,CAAQ,gBAAgB,EAAE,CAAA,CAAE,IAAK,EAAC,EAC7C,MAAO,CAAA,CAAA,CAAA,KAAK,CAAM,KAAA,EAAE,EACpB,GAAI,CAAA,CAAA,CAAA,KAAK,CAAI,CAAA,EAAA,IAAA,CAAK,UAAU,CAAC,CAAC,CAAM,GAAA,EAAA,IAAA,CAAK,UAAU,CAAC,CAAC,CAAK,GAAA,CAAA,CAAA,CAC1D,KAAK,GAAG,CAAA;AAAA,QACX,QAAQ,KAAM,CAAA,OAAA;AAAA,QACd,OAAO,KAAM,CAAA,KAAA;AAAA,QACb,MAAA;AAAA,QACA,KAAA;AAAA,QACA,SAAS,OAAQ,CAAA,gBAAA;AAAA,OACnB;AAAA,MACA,QAAA;AAAA,KACF,CAAA;AAAA,GACF;AAAA,EAEA,cAAc,UAAqC,EAAA;AACjD,IAAA,IAAA,CAAK,UAAa,GAAA,UAAA,CAAA;AAAA,GACpB;AAAA,EAEA,MAAM,WAAW,IAAc,EAAA;AAC7B,IAAA,OAAO,IAAI,qBAAsB,CAAA;AAAA,MAC/B,WAAW,IAAK,CAAA,gBAAA;AAAA,MAChB,IAAA;AAAA,MACA,eAAe,IAAK,CAAA,aAAA;AAAA,MACpB,QAAQ,IAAK,CAAA,MAAA,EAAQ,MAAM,EAAE,YAAA,EAAc,MAAM,CAAA;AAAA,KAClD,CAAA,CAAA;AAAA,GACH;AAAA,EAEA,MAAM,MAAM,KAAiD,EAAA;AAC3D,IAAA,MAAM,EAAE,OAAS,EAAA,QAAA,EAAa,GAAA,IAAA,CAAK,WAAW,KAAO,EAAA;AAAA,MACnD,kBAAkB,IAAK,CAAA,gBAAA;AAAA,KACxB,CAAA,CAAA;AAED,IAAM,MAAA,IAAA,GAAO,MAAM,IAAA,CAAK,aAAc,CAAA,WAAA;AAAA,MAAY,OAAM,EACtD,KAAA,IAAA,CAAK,aAAc,CAAA,KAAA,CAAM,IAAI,OAAO,CAAA;AAAA,KACtC,CAAA;AAIA,IAAA,MAAM,EAAE,IAAA,EAAS,GAAA,gBAAA,CAAiB,MAAM,UAAU,CAAA,CAAA;AAClD,IAAM,MAAA,WAAA,GAAc,KAAK,MAAS,GAAA,QAAA,CAAA;AAClC,IAAA,MAAM,kBAAkB,IAAO,GAAA,CAAA,CAAA;AAC/B,IAAA,MAAM,QAAW,GAAA,I
AAA,CAAK,KAAM,CAAA,CAAA,EAAG,QAAQ,CAAA,CAAA;AACvC,IAAM,MAAA,cAAA,GAAiB,cACnB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AACJ,IAAM,MAAA,kBAAA,GAAqB,kBACvB,gBAAiB,CAAA,EAAE,MAAM,IAAO,GAAA,CAAA,EAAG,CACnC,GAAA,KAAA,CAAA,CAAA;AAEJ,IAAA,MAAM,UAAU,QAAS,CAAA,GAAA;AAAA,MACvB,CAAC,EAAE,IAAA,EAAM,QAAU,EAAA,SAAA,IAAa,KAA4B,MAAA;AAAA,QAC1D,IAAA;AAAA,QACA,QAAA;AAAA,QACA,IAAA,EAAM,IAAO,GAAA,QAAA,GAAW,KAAQ,GAAA,CAAA;AAAA,QAChC,SAAW,EAAA;AAAA,UACT,MAAA,EAAQ,QAAQ,OAAQ,CAAA,MAAA;AAAA,UACxB,OAAA,EAAS,QAAQ,OAAQ,CAAA,OAAA;AAAA,UACzB,QAAQ,SACJ,GAAA;AAAA,YACE,MAAM,SAAU,CAAA,IAAA;AAAA,YAChB,OAAO,SAAU,CAAA,KAAA;AAAA,YACjB,UAAU,SAAU,CAAA,QAAA;AAAA,YACpB,IAAM,EAAA,EAAA;AAAA,cAER,EAAC;AAAA,SACP;AAAA,OACF,CAAA;AAAA,KACF,CAAA;AAEA,IAAO,OAAA,EAAE,OAAS,EAAA,cAAA,EAAgB,kBAAmB,EAAA,CAAA;AAAA,GACvD;AACF,CAAA;AAEO,SAAS,iBAAiB,UAAuC,EAAA;AACtE,EAAA,IAAI,CAAC,UAAY,EAAA;AACf,IAAO,OAAA,EAAE,MAAM,CAAE,EAAA,CAAA;AAAA,GACnB;AAEA,EAAO,OAAA;AAAA,IACL,IAAA,EAAM,OAAO,MAAO,CAAA,IAAA,CAAK,YAAY,QAAQ,CAAA,CAAE,QAAS,CAAA,OAAO,CAAC,CAAA;AAAA,GAClE,CAAA;AACF,CAAA;AAEgB,SAAA,gBAAA,CAAiB,EAAE,IAAA,EAAkC,EAAA;AACnE,EAAO,OAAA,MAAA,CAAO,KAAK,CAAG,EAAA,IAAI,IAAI,OAAO,CAAA,CAAE,SAAS,QAAQ,CAAA,CAAA;AAC1D;;;;;"}