@backstage/plugin-search-backend-module-pg 0.2.1 → 0.2.2
This diff compares publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +8 -0
- package/dist/index.cjs.js +15 -17
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/package.json +5 -5
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,13 @@
 # @backstage/plugin-search-backend-module-pg
 
+## 0.2.2
+
+### Patch Changes
+
+- dcd1a0c3f4: Minor improvement to the API reports, by not unpacking arguments directly
+- Updated dependencies
+  - @backstage/backend-common@0.9.13
+
 ## 0.2.1
 
 ### Patch Changes
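The "not unpacking arguments directly" entry corresponds to the `PgSearchEngine.from` signature change visible in the dist diff below. A minimal sketch of the pattern (the types here are abbreviated stand-ins, not the package's real `PluginDatabaseManager`):

```typescript
// Sketch of the signature change behind the changelog entry above.
type Database = { getClient(): Promise<unknown> };

// 0.2.1 style: the argument is unpacked directly in the signature, so API
// reports render an anonymous destructuring pattern instead of a name.
async function fromDestructured({ database }: { database: Database }) {
  return database.getClient();
}

// 0.2.2 style: a named options parameter keeps its name in API reports.
async function fromOptions(options: { database: Database }) {
  return options.database.getClient();
}

// Call sites are identical for both styles:
//   await fromOptions({ database });
```

Because only the parameter style changes, existing call sites keep working unchanged.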
package/dist/index.cjs.js
CHANGED

@@ -9,7 +9,7 @@ async function queryPostgresMajorVersion(knex) {
   if (knex.client.config.client !== "pg") {
     throw new Error("Can't resolve version, not a postgres database");
   }
-  const {rows} = await knex.raw("SHOW server_version_num");
+  const { rows } = await knex.raw("SHOW server_version_num");
   const [result] = rows;
   const version = +result.server_version_num;
   const majorVersion = Math.floor(version / 1e4);
@@ -51,7 +51,7 @@ class DatabaseDocumentStore {
   }
   async completeInsert(tx, type) {
     await tx.insert(tx("documents_to_insert").select("type", "document", "hash")).into(tx.raw("documents (type, document, hash)")).onConflict("hash").ignore();
-    await tx("documents").where({type}).whereNotIn("hash", tx("documents_to_insert").select("hash")).delete();
+    await tx("documents").where({ type }).whereNotIn("hash", tx("documents_to_insert").select("hash")).delete();
   }
   async insertDocuments(tx, type, documents) {
     await tx("documents_to_insert").insert(documents.map((document) => ({
@@ -59,7 +59,7 @@ class DatabaseDocumentStore {
       document
     })));
   }
-  async query(tx, {types, pgTerm, fields, offset, limit}) {
+  async query(tx, { types, pgTerm, fields, offset, limit }) {
     const query = tx("documents");
     if (pgTerm) {
       query.from(tx.raw("documents, to_tsquery('english', ?) query", pgTerm)).whereRaw("query @@ body");
@@ -73,7 +73,7 @@ class DatabaseDocumentStore {
       Object.keys(fields).forEach((key) => {
         const value = fields[key];
         const valueArray = Array.isArray(value) ? value : [value];
-        const valueCompare = valueArray.map((v) => ({[key]: v})).map((v) => JSON.stringify(v));
+        const valueCompare = valueArray.map((v) => ({ [key]: v })).map((v) => JSON.stringify(v));
         query.whereRaw(`(${valueCompare.map(() => "document @> ?").join(" OR ")})`, valueCompare);
       });
     }
@@ -91,17 +91,15 @@ class PgSearchEngine {
   constructor(databaseStore) {
     this.databaseStore = databaseStore;
   }
-  static async from({
-    database
-  }) {
-    return new PgSearchEngine(await DatabaseDocumentStore.create(await database.getClient()));
+  static async from(options) {
+    return new PgSearchEngine(await DatabaseDocumentStore.create(await options.database.getClient()));
   }
   static async supported(database) {
     return await DatabaseDocumentStore.supported(await database.getClient());
   }
   translator(query) {
     const pageSize = 25;
-    const {page} = decodePageCursor(query.pageCursor);
+    const { page } = decodePageCursor(query.pageCursor);
     const offset = page * pageSize;
     const limit = pageSize + 1;
     return {
@@ -129,30 +127,30 @@ class PgSearchEngine {
     });
   }
   async query(query) {
-    const {pgQuery, pageSize} = this.translator(query);
+    const { pgQuery, pageSize } = this.translator(query);
     const rows = await this.databaseStore.transaction(async (tx) => this.databaseStore.query(tx, pgQuery));
-    const {page} = decodePageCursor(query.pageCursor);
+    const { page } = decodePageCursor(query.pageCursor);
     const hasNextPage = rows.length > pageSize;
     const hasPreviousPage = page > 0;
     const pageRows = rows.slice(0, pageSize);
-    const nextPageCursor = hasNextPage ? encodePageCursor({page: page + 1}) : void 0;
-    const previousPageCursor = hasPreviousPage ? encodePageCursor({page: page - 1}) : void 0;
-    const results = pageRows.map(({type, document}) => ({
+    const nextPageCursor = hasNextPage ? encodePageCursor({ page: page + 1 }) : void 0;
+    const previousPageCursor = hasPreviousPage ? encodePageCursor({ page: page - 1 }) : void 0;
+    const results = pageRows.map(({ type, document }) => ({
       type,
       document
    }));
-    return {results, nextPageCursor, previousPageCursor};
+    return { results, nextPageCursor, previousPageCursor };
   }
 }
 function decodePageCursor(pageCursor) {
   if (!pageCursor) {
-    return {page: 0};
+    return { page: 0 };
   }
   return {
     page: Number(Buffer.from(pageCursor, "base64").toString("utf-8"))
   };
 }
-function encodePageCursor({page}) {
+function encodePageCursor({ page }) {
   return Buffer.from(`${page}`, "utf-8").toString("base64");
 }
 
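Apart from the whitespace churn, the page-cursor scheme touched by these hunks is small enough to read in isolation: a cursor is just the page number, base64-encoded, and the engine requests `pageSize + 1` rows so an extra row signals a next page without a COUNT query. A standalone sketch, using the same shapes as the code above (requires Node's `Buffer`):

```typescript
// Standalone sketch of the cursor scheme used in dist/index.cjs.js above.
function encodePageCursor({ page }: { page: number }): string {
  return Buffer.from(`${page}`, 'utf-8').toString('base64');
}

function decodePageCursor(pageCursor?: string): { page: number } {
  if (!pageCursor) {
    return { page: 0 }; // no cursor means the first page
  }
  return { page: Number(Buffer.from(pageCursor, 'base64').toString('utf-8')) };
}

// Round trip: page 2 encodes to "Mg==" and decodes back to 2.
const cursor = encodePageCursor({ page: 2 });
const { page } = decodePageCursor(cursor); // 2

// With pageSize = 25 the engine fetches limit = 26 rows; if all 26 come
// back, a nextPageCursor for page + 1 is emitted and only 25 are returned.
```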
package/dist/index.cjs.js.map
CHANGED

(single-line minified source map, regenerated so its embedded `sourcesContent` and `mappings` match the dist/index.cjs.js changes above)
package/dist/index.d.ts
CHANGED

@@ -46,7 +46,7 @@ declare type ConcretePgSearchQuery = {
 declare class PgSearchEngine implements SearchEngine {
     private readonly databaseStore;
     constructor(databaseStore: DatabaseStore);
-    static from({ database, }: {
+    static from(options: {
         database: PluginDatabaseManager;
     }): Promise<PgSearchEngine>;
     static supported(database: PluginDatabaseManager): Promise<boolean>;
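Against the new declaration, construction might look like this sketch (it assumes a Backstage backend environment that supplies a `PluginDatabaseManager`; `createSearchEngine` is a hypothetical helper name, not part of the package):

```typescript
import { PluginDatabaseManager } from '@backstage/backend-common';
import { PgSearchEngine } from '@backstage/plugin-search-backend-module-pg';

// Hypothetical helper: gate on supported() — which checks for postgres >= 12
// — before constructing the engine, then pass the options object to from().
async function createSearchEngine(database: PluginDatabaseManager) {
  if (!(await PgSearchEngine.supported(database))) {
    throw new Error('PgSearchEngine requires a postgres (>= 12) database');
  }
  return await PgSearchEngine.from({ database });
}
```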
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@backstage/plugin-search-backend-module-pg",
   "description": "A module for the search backend that implements search using PostgreSQL",
-  "version": "0.2.1",
+  "version": "0.2.2",
   "main": "dist/index.cjs.js",
   "types": "dist/index.d.ts",
   "license": "Apache-2.0",
@@ -20,19 +20,19 @@
     "clean": "backstage-cli clean"
   },
   "dependencies": {
-    "@backstage/backend-common": "^0.9.
+    "@backstage/backend-common": "^0.9.13",
     "@backstage/plugin-search-backend-node": "^0.4.2",
     "@backstage/search-common": "^0.2.0",
     "knex": "^0.95.1",
     "lodash": "^4.17.21"
   },
   "devDependencies": {
-    "@backstage/backend-test-utils": "^0.1.
-    "@backstage/cli": "^0.
+    "@backstage/backend-test-utils": "^0.1.10",
+    "@backstage/cli": "^0.10.1"
   },
   "files": [
     "dist",
     "migrations"
   ],
-  "gitHead": "
+  "gitHead": "562be0b43016294e27af3ad024191bb86b13b1c1"
 }