@backstage/plugin-catalog-backend 1.31.1-next.0 → 1.32.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/config.d.ts +7 -0
- package/dist/database/DefaultProviderDatabase.cjs.js +7 -0
- package/dist/database/DefaultProviderDatabase.cjs.js.map +1 -1
- package/dist/database/operations/provider/refreshByRefreshKeys.cjs.js +1 -1
- package/dist/database/operations/provider/refreshByRefreshKeys.cjs.js.map +1 -1
- package/dist/database/operations/stitcher/buildEntitySearch.cjs.js +6 -1
- package/dist/database/operations/stitcher/buildEntitySearch.cjs.js.map +1 -1
- package/dist/index.d.ts +2 -3
- package/dist/processing/evictEntitiesFromOrphanedProviders.cjs.js +47 -0
- package/dist/processing/evictEntitiesFromOrphanedProviders.cjs.js.map +1 -0
- package/dist/schema/openapi/generated/router.cjs.js +21 -5
- package/dist/schema/openapi/generated/router.cjs.js.map +1 -1
- package/dist/service/CatalogBuilder.cjs.js +14 -2
- package/dist/service/CatalogBuilder.cjs.js.map +1 -1
- package/package.json +14 -14
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,55 @@
  # @backstage/plugin-catalog-backend
 
+ ## 1.32.0-next.2
+
+ ### Patch Changes
+
+ - 4306303: Added a fix in `@backstage/plugin-catalog-backend` to prevent duplicate path keys in entity search when the keys differ only in casing.
+ - 5243aa4: Fixed an issue that occurred when authorizing permissions using custom rules passed via the `PermissionsRegistryService`.
+ - Updated dependencies
+   - @backstage/integration@1.16.2-next.0
+   - @backstage/plugin-events-node@0.4.9-next.2
+   - @backstage/backend-openapi-utils@0.5.1-next.1
+   - @backstage/backend-plugin-api@1.2.1-next.1
+   - @backstage/catalog-client@1.9.1
+   - @backstage/catalog-model@1.7.3
+   - @backstage/config@1.3.2
+   - @backstage/errors@1.2.7
+   - @backstage/types@1.2.1
+   - @backstage/plugin-catalog-common@1.1.3
+   - @backstage/plugin-catalog-node@1.16.1-next.1
+   - @backstage/plugin-permission-common@0.8.4
+   - @backstage/plugin-permission-node@0.8.9-next.1
+   - @backstage/plugin-search-backend-module-catalog@0.3.2-next.1
+   - @backstage/plugin-search-common@1.2.17
+
+ ## 1.32.0-next.1
+
+ ### Minor Changes
+
+ - ca9c51b: Added an opt-in ability to evict entities from the catalog whose provider is no longer configured. See the [Catalog configuration documentation](https://backstage.io/docs/features/software-catalog/configuration#clean-up-entities-from-orphaned-entity-providers).
+
+ ### Patch Changes
+
+ - fbc1666: Correctly use the `catalog.useUrlReadersSearch` config.
+ - 75cadc1: Minor internal tweak to `refreshByRefreshKeys`.
+ - Updated dependencies
+   - @backstage/backend-openapi-utils@0.5.1-next.1
+   - @backstage/backend-plugin-api@1.2.1-next.1
+   - @backstage/catalog-client@1.9.1
+   - @backstage/catalog-model@1.7.3
+   - @backstage/config@1.3.2
+   - @backstage/errors@1.2.7
+   - @backstage/integration@1.16.1
+   - @backstage/types@1.2.1
+   - @backstage/plugin-catalog-common@1.1.3
+   - @backstage/plugin-catalog-node@1.16.1-next.1
+   - @backstage/plugin-events-node@0.4.9-next.1
+   - @backstage/plugin-permission-common@0.8.4
+   - @backstage/plugin-permission-node@0.8.9-next.1
+   - @backstage/plugin-search-backend-module-catalog@0.3.2-next.1
+   - @backstage/plugin-search-common@1.2.17
+
  ## 1.31.1-next.0
 
  ### Patch Changes
package/config.d.ts
CHANGED
@@ -155,6 +155,13 @@ export interface Config {
       */
      orphanStrategy?: 'keep' | 'delete';
 
+     /**
+      * The strategy to use for entities that are referenced by providers that are orphaned,
+      * i.e. entities with no providers currently configured in the catalog. The default value is
+      * "keep".
+      */
+     orphanProviderStrategy?: 'keep' | 'delete';
+
      /**
       * The strategy to use when stitching together the final entities.
       */
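The new `orphanProviderStrategy` mirrors the existing `orphanStrategy` switch. As a hedged sketch only (the exact app-config path is an assumption, and the consuming code in `CatalogBuilder.cjs.js` is not reproduced in this excerpt), opting in and reading the flag could look like this:

```ts
import { ConfigReader } from '@backstage/config';

// Sketch: opt in to evicting entities whose provider is gone; 'keep' stays
// the default and preserves current behavior.
const config = new ConfigReader({
  catalog: { orphanProviderStrategy: 'delete' },
});

// Hypothetical read-side lookup, not an API of this package:
const strategy =
  config.getOptionalString('catalog.orphanProviderStrategy') ?? 'keep';
console.log(strategy); // 'delete'
```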
package/dist/database/DefaultProviderDatabase.cjs.js
CHANGED
@@ -140,6 +140,13 @@ class DefaultProviderDatabase {
        }
      }
    }
+   async listReferenceSourceKeys(txOpaque) {
+     const tx = txOpaque;
+     const rows = await tx(
+       "refresh_state_references"
+     ).distinct("source_key").whereNotNull("source_key");
+     return rows.map((row) => row.source_key).filter((key) => !!key);
+   }
    async refreshByRefreshKeys(txOpaque, options) {
      const tx = txOpaque;
      await refreshByRefreshKeys.refreshByRefreshKeys({ tx, keys: options.keys });
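The new `listReferenceSourceKeys` is the database-side building block for the orphaned-provider eviction announced in the changelog: it surfaces every `source_key` that still owns entity references, so the processing layer can diff that set against the providers currently configured. A rough sketch under hypothetical names (the shipped logic lives in `dist/processing/evictEntitiesFromOrphanedProviders.cjs.js`, listed above but not reproduced here):

```ts
// Hypothetical sketch of the eviction decision, not the shipped implementation.
interface ProviderDatabaseLike {
  listReferenceSourceKeys(tx: unknown): Promise<string[]>;
}

async function findOrphanedSourceKeys(
  db: ProviderDatabaseLike,
  tx: unknown,
  configuredProviders: Set<string>,
): Promise<string[]> {
  const stored = await db.listReferenceSourceKeys(tx);
  // A stored source_key with no matching configured provider is orphaned;
  // with orphanProviderStrategy 'delete', its entities become eviction candidates.
  return stored.filter(key => !configuredProviders.has(key));
}
```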
package/dist/database/DefaultProviderDatabase.cjs.js.map
CHANGED
@@ -1 +1 @@
[single-line sourcemap regenerated; its embedded sourcesContent adds the TypeScript source of the new `listReferenceSourceKeys(txOpaque: Transaction): Promise<string[]>` method, which selects distinct non-null `source_key` values from `refresh_state_references`]
package/dist/database/operations/provider/refreshByRefreshKeys.cjs.js
CHANGED
@@ -6,7 +6,7 @@ async function refreshByRefreshKeys(options) {
    const { tx, keys } = options;
    const hashedKeys = keys.map((k) => util.generateTargetKey(k));
    await tx("refresh_state").whereIn("entity_id", function selectEntityRefs(inner) {
-     inner.whereIn("key", hashedKeys).select({
+     return inner.whereIn("key", hashedKeys).select({
        entity_id: "refresh_keys.entity_id"
      }).from("refresh_keys");
    }).update({ next_update_at: tx.fn.now() });
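The hunk above only adds an explicit `return` of the subquery builder; the changelog calls this a minor internal tweak rather than a bug fix, and per the sourcemap that follows, the TypeScript side also widens the accepted `tx` type. A self-contained sketch of the same knex subquery shape, compiled offline with no database connection; `hashed-key-1` is a made-up stand-in for a hashed refresh key:

```ts
import knexFactory from 'knex';

// No connection needed just to build and print SQL.
const knex = knexFactory({ client: 'pg' });

const sql = knex('refresh_state')
  .whereIn('entity_id', function selectEntityRefs(inner) {
    // Mirrors the shipped pattern: configure the subquery builder that knex
    // hands to the callback, returning it explicitly as the tweak above does.
    return inner
      .select({ entity_id: 'refresh_keys.entity_id' })
      .from('refresh_keys')
      .whereIn('key', ['hashed-key-1']);
  })
  .update({ next_update_at: knex.fn.now() })
  .toString();

console.log(sql); // UPDATE ... WHERE "entity_id" IN (SELECT ... FROM "refresh_keys" ...)
```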
package/dist/database/operations/provider/refreshByRefreshKeys.cjs.js.map
CHANGED
@@ -1 +1 @@
[single-line sourcemap regenerated; its embedded sourcesContent shows the TypeScript-side tweaks: the options type widens `tx` from `Knex.Transaction` to `Knex | Knex.Transaction`, and the `whereIn` callback now returns the inner query builder]
package/dist/database/operations/stitcher/buildEntitySearch.cjs.js
CHANGED
@@ -31,7 +31,12 @@ function traverse(root) {
      for (const item of current) {
        visit(path, item);
        if (typeof item === "string") {
-         output.push({ key: `${path}.${item}`, value: true });
+         const pathKey = `${path}.${item}`;
+         if (!output.some(
+           (kv) => kv.key.toLocaleLowerCase("en-US") === pathKey.toLocaleLowerCase("en-US")
+         )) {
+           output.push({ key: pathKey, value: true });
+         }
        }
      }
      return;
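This guard in `traverse` matters because `buildEntitySearch` (see the sourcemap that follows) throws an `InputError` when an entity produces search keys that vary only in casing; with the guard, the first case variant wins and the entity still stitches. A standalone illustration of the new behavior, not shipped code:

```ts
type Kv = { key: string; value: unknown };

// Mirrors the added dedup check: push a boolean path key only if no existing
// key matches it case-insensitively.
function pushUniquePathKey(output: Kv[], pathKey: string): void {
  const lower = pathKey.toLocaleLowerCase('en-US');
  if (!output.some(kv => kv.key.toLocaleLowerCase('en-US') === lower)) {
    output.push({ key: pathKey, value: true });
  }
}

const output: Kv[] = [];
pushUniquePathKey(output, 'spec.tags.Foo');
pushUniquePathKey(output, 'spec.tags.foo'); // skipped: same key except casing
console.log(output); // [ { key: 'spec.tags.Foo', value: true } ]
```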
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"buildEntitySearch.cjs.js","sources":["../../../../src/database/operations/stitcher/buildEntitySearch.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DEFAULT_NAMESPACE, Entity } from '@backstage/catalog-model';\nimport { InputError } from '@backstage/errors';\nimport { DbSearchRow } from '../../tables';\n\n// These are excluded in the generic loop, either because they do not make sense\n// to index, or because they are special-case always inserted whether they are\n// null or not\nconst SPECIAL_KEYS = [\n 'attachments',\n 'relations',\n 'status',\n 'metadata.name',\n 'metadata.namespace',\n 'metadata.uid',\n 'metadata.etag',\n];\n\n// The maximum length allowed for search values. These columns are indexed, and\n// database engines do not like to index on massive values. For example,\n// postgres will balk after 8191 byte line sizes.\nconst MAX_KEY_LENGTH = 200;\nconst MAX_VALUE_LENGTH = 200;\n\ntype Kv = {\n key: string;\n value: unknown;\n};\n\n// Helper for traversing through a nested structure and outputting a list of\n// path->value entries of the leaves.\n//\n// For example, this yaml structure\n//\n// a: 1\n// b:\n// c: null\n// e: [f, g]\n// h:\n// - i: 1\n// j: k\n// - i: 2\n// j: l\n//\n// will result in\n//\n// \"a\", 1\n// \"b.c\", null\n// \"b.e\": \"f\"\n// \"b.e.f\": true\n// \"b.e\": \"g\"\n// \"b.e.g\": true\n// \"h.i\": 1\n// \"h.j\": \"k\"\n// \"h.i\": 2\n// \"h.j\": \"l\"\nexport function traverse(root: unknown): Kv[] {\n const output: Kv[] = [];\n\n function visit(path: string, current: unknown) {\n if (SPECIAL_KEYS.includes(path)) {\n return;\n }\n\n // empty or scalar\n if (\n current === undefined ||\n current === null ||\n ['string', 'number', 'boolean'].includes(typeof current)\n ) {\n output.push({ key: path, value: current });\n return;\n }\n\n // unknown\n if (typeof current !== 'object') {\n return;\n }\n\n // array\n if (Array.isArray(current)) {\n for (const item of current) {\n // NOTE(freben): The reason that these are output in two different ways,\n // is to support use cases where you want to express that MORE than one\n // tag is present in a list. Since the EntityFilters structure is a\n // record, you can't have several entries of the same key. Therefore\n // you will have to match on\n //\n // { \"a.b\": [\"true\"], \"a.c\": [\"true\"] }\n //\n // rather than\n //\n // { \"a\": [\"b\", \"c\"] }\n //\n // because the latter means EITHER b or c has to be present.\n visit(path, item);\n if (typeof item === 'string') {\n output.push({ key: `${path}.${item}`, value: true });\n }\n }\n return;\n }\n\n // object\n for (const [key, value] of Object.entries(current!)) {\n visit(path ? 
`${path}.${key}` : key, value);\n }\n }\n\n visit('', root);\n\n return output;\n}\n\n// Translates a number of raw data rows to search table rows\nexport function mapToRows(input: Kv[], entityId: string): DbSearchRow[] {\n const result: DbSearchRow[] = [];\n\n for (const { key: rawKey, value: rawValue } of input) {\n const key = rawKey.toLocaleLowerCase('en-US');\n if (key.length > MAX_KEY_LENGTH) {\n continue;\n }\n if (rawValue === undefined || rawValue === null) {\n result.push({\n entity_id: entityId,\n key,\n original_value: null,\n value: null,\n });\n } else {\n const value = String(rawValue).toLocaleLowerCase('en-US');\n if (value.length <= MAX_VALUE_LENGTH) {\n result.push({\n entity_id: entityId,\n key,\n original_value: String(rawValue),\n value: value,\n });\n } else {\n result.push({\n entity_id: entityId,\n key,\n original_value: null,\n value: null,\n });\n }\n }\n }\n\n return result;\n}\n\n/**\n * Generates all of the search rows that are relevant for this entity.\n *\n * @param entityId - The uid of the entity\n * @param entity - The entity\n * @returns A list of entity search rows\n */\nexport function buildEntitySearch(\n entityId: string,\n entity: Entity,\n): DbSearchRow[] {\n // Visit the base structure recursively\n const raw = traverse(entity);\n\n // Start with some special keys that are always present because you want to\n // be able to easily search for null specifically\n raw.push({ key: 'metadata.name', value: entity.metadata.name });\n raw.push({ key: 'metadata.namespace', value: entity.metadata.namespace });\n raw.push({ key: 'metadata.uid', value: entity.metadata.uid });\n\n // Namespace not specified has the default value \"default\", so we want to\n // match on that as well\n if (!entity.metadata.namespace) {\n raw.push({ key: 'metadata.namespace', value: DEFAULT_NAMESPACE });\n }\n\n // Visit relations\n for (const relation of entity.relations ?? 
[]) {\n raw.push({\n key: `relations.${relation.type}`,\n value: relation.targetRef,\n });\n }\n\n // This validates that there are no keys that vary only in casing, such\n // as `spec.foo` and `spec.Foo`.\n const keys = new Set(raw.map(r => r.key));\n const lowerKeys = new Set(raw.map(r => r.key.toLocaleLowerCase('en-US')));\n if (keys.size !== lowerKeys.size) {\n const difference = [];\n for (const key of keys) {\n const lower = key.toLocaleLowerCase('en-US');\n if (!lowerKeys.delete(lower)) {\n difference.push(lower);\n }\n }\n const badKeys = `'${difference.join(\"', '\")}'`;\n throw new InputError(\n `Entity has duplicate keys that vary only in casing, ${badKeys}`,\n );\n }\n\n return mapToRows(raw, entityId);\n}\n"],"names":["DEFAULT_NAMESPACE","InputError"],"mappings":";;;;;AAuBA,MAAM,YAAe,GAAA;AAAA,EACnB,aAAA;AAAA,EACA,WAAA;AAAA,EACA,QAAA;AAAA,EACA,eAAA;AAAA,EACA,oBAAA;AAAA,EACA,cAAA;AAAA,EACA;AACF,CAAA;AAKA,MAAM,cAAiB,GAAA,GAAA;AACvB,MAAM,gBAAmB,GAAA,GAAA;AAkClB,SAAS,SAAS,IAAqB,EAAA;AAC5C,EAAA,MAAM,SAAe,EAAC;AAEtB,EAAS,SAAA,KAAA,CAAM,MAAc,OAAkB,EAAA;AAC7C,IAAI,IAAA,YAAA,CAAa,QAAS,CAAA,IAAI,CAAG,EAAA;AAC/B,MAAA;AAAA;AAIF,IAAA,IACE,OAAY,KAAA,KAAA,CAAA,IACZ,OAAY,KAAA,IAAA,IACZ,CAAC,QAAA,EAAU,QAAU,EAAA,SAAS,CAAE,CAAA,QAAA,CAAS,OAAO,OAAO,CACvD,EAAA;AACA,MAAA,MAAA,CAAO,KAAK,EAAE,GAAA,EAAK,IAAM,EAAA,KAAA,EAAO,SAAS,CAAA;AACzC,MAAA;AAAA;AAIF,IAAI,IAAA,OAAO,YAAY,QAAU,EAAA;AAC/B,MAAA;AAAA;AAIF,IAAI,IAAA,KAAA,CAAM,OAAQ,CAAA,OAAO,CAAG,EAAA;AAC1B,MAAA,KAAA,MAAW,QAAQ,OAAS,EAAA;AAc1B,QAAA,KAAA,CAAM,MAAM,IAAI,CAAA;AAChB,QAAI,IAAA,OAAO,SAAS,QAAU,EAAA;AAC5B,UAAO,MAAA,CAAA,IAAA,CAAK,EAAE,GAAA,EAAK,CAAG,EAAA,IAAI,IAAI,IAAI,CAAA,CAAA,EAAI,KAAO,EAAA,IAAA,EAAM,CAAA;AAAA;AACrD;AAEF,MAAA;AAAA;AAIF,IAAA,KAAA,MAAW,CAAC,GAAK,EAAA,KAAK,KAAK,MAAO,CAAA,OAAA,CAAQ,OAAQ,CAAG,EAAA;AACnD,MAAA,KAAA,CAAM,OAAO,CAAG,EAAA,IAAI,IAAI,GAAG,CAAA,CAAA,GAAK,KAAK,KAAK,CAAA;AAAA;AAC5C;AAGF,EAAA,KAAA,CAAM,IAAI,IAAI,CAAA;AAEd,EAAO,OAAA,MAAA;AACT;AAGgB,SAAA,SAAA,CAAU,OAAa,QAAiC,EAAA;AACtE,EAAA,MAAM,SAAwB,EAAC;AAE/B,EAAA,KAAA,MAAW,EAAE,GAAK,EAAA,MAAA,EAAQ,KAAO,EAAA,QAAA,MAAc,KAAO,EAAA;AACpD,IAAM,MAAA,GAAA,GAAM,MAAO,CAAA,iBAAA,CAAkB,OAAO,CAAA;AAC5C,IAAI,IAAA,GAAA,CAAI,SAAS,cAAgB,EAAA;AAC/B,MAAA;AAAA;AAEF,IAAI,IAAA,QAAA,KAAa,KAAa,CAAA,IAAA,QAAA,KAAa,IAAM,EAAA;AAC/C,MAAA,MAAA,CAAO,IAAK,CAAA;AAAA,QACV,SAAW,EAAA,QAAA;AAAA,QACX,GAAA;AAAA,QACA,cAAgB,EAAA,IAAA;AAAA,QAChB,KAAO,EAAA;AAAA,OACR,CAAA;AAAA,KACI,MAAA;AACL,MAAA,MAAM,KAAQ,GAAA,MAAA,CAAO,QAAQ,CAAA,CAAE,kBAAkB,OAAO,CAAA;AACxD,MAAI,IAAA,KAAA,CAAM,UAAU,gBAAkB,EAAA;AACpC,QAAA,MAAA,CAAO,IAAK,CAAA;AAAA,UACV,SAAW,EAAA,QAAA;AAAA,UACX,GAAA;AAAA,UACA,cAAA,EAAgB,OAAO,QAAQ,CAAA;AAAA,UAC/B;AAAA,SACD,CAAA;AAAA,OACI,MAAA;AACL,QAAA,MAAA,CAAO,IAAK,CAAA;AAAA,UACV,SAAW,EAAA,QAAA;AAAA,UACX,GAAA;AAAA,UACA,cAAgB,EAAA,IAAA;AAAA,UAChB,KAAO,EAAA;AAAA,SACR,CAAA;AAAA;AACH;AACF;AAGF,EAAO,OAAA,MAAA;AACT;AASgB,SAAA,iBAAA,CACd,UACA,MACe,EAAA;AAEf,EAAM,MAAA,GAAA,GAAM,SAAS,MAAM,CAAA;AAI3B,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,eAAA,EAAiB,OAAO,MAAO,CAAA,QAAA,CAAS,MAAM,CAAA;AAC9D,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,oBAAA,EAAsB,OAAO,MAAO,CAAA,QAAA,CAAS,WAAW,CAAA;AACxE,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,cAAA,EAAgB,OAAO,MAAO,CAAA,QAAA,CAAS,KAAK,CAAA;AAI5D,EAAI,IAAA,CAAC,MAAO,CAAA,QAAA,CAAS,SAAW,EAAA;AAC9B,IAAA,GAAA,CAAI,KAAK,EAAE,GAAA,EAAK,oBAAsB,EAAA,KAAA,EAAOA,gCAAmB,CAAA;AAAA;AAIlE,EAAA,KAAA,MAAW,QAAY,IAAA,MAAA,CAAO,SAAa,IAAA,EAAI,EAAA;AAC7C,IAAA,GAAA,CAAI,IAAK,CAAA;AAAA,MACP,GAAA,EAAK,CAAa,UAAA,EAAA,QAAA,CAAS,IAAI,CAAA,CAAA;AAAA,MAC/B,OAAO,QAAS,CAAA;AAAA,KACjB,CAAA;AAAA;AAKH,EAAM,MAAA,IAAA,GAAO,IAAI,GAAI,CAAA
,GAAA,CAAI,IAAI,CAAK,CAAA,KAAA,CAAA,CAAE,GAAG,CAAC,CAAA;AACxC,EAAM,MAAA,SAAA,GAAY,IAAI,GAAA,CAAI,GAAI,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,GAAI,CAAA,iBAAA,CAAkB,OAAO,CAAC,CAAC,CAAA;AACxE,EAAI,IAAA,IAAA,CAAK,IAAS,KAAA,SAAA,CAAU,IAAM,EAAA;AAChC,IAAA,MAAM,aAAa,EAAC;AACpB,IAAA,KAAA,MAAW,OAAO,IAAM,EAAA;AACtB,MAAM,MAAA,KAAA,GAAQ,GAAI,CAAA,iBAAA,CAAkB,OAAO,CAAA;AAC3C,MAAA,IAAI,CAAC,SAAA,CAAU,MAAO,CAAA,KAAK,CAAG,EAAA;AAC5B,QAAA,UAAA,CAAW,KAAK,KAAK,CAAA;AAAA;AACvB;AAEF,IAAA,MAAM,OAAU,GAAA,CAAA,CAAA,EAAI,UAAW,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,CAAA,CAAA;AAC3C,IAAA,MAAM,IAAIC,iBAAA;AAAA,MACR,uDAAuD,OAAO,CAAA;AAAA,KAChE;AAAA;AAGF,EAAO,OAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AAChC;;;;;;"}
+{"version":3,"file":"buildEntitySearch.cjs.js","sources":["../../../../src/database/operations/stitcher/buildEntitySearch.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DEFAULT_NAMESPACE, Entity } from '@backstage/catalog-model';\nimport { InputError } from '@backstage/errors';\nimport { DbSearchRow } from '../../tables';\n\n// These are excluded in the generic loop, either because they do not make sense\n// to index, or because they are special-case always inserted whether they are\n// null or not\nconst SPECIAL_KEYS = [\n 'attachments',\n 'relations',\n 'status',\n 'metadata.name',\n 'metadata.namespace',\n 'metadata.uid',\n 'metadata.etag',\n];\n\n// The maximum length allowed for search values. These columns are indexed, and\n// database engines do not like to index on massive values. For example,\n// postgres will balk after 8191 byte line sizes.\nconst MAX_KEY_LENGTH = 200;\nconst MAX_VALUE_LENGTH = 200;\n\ntype Kv = {\n key: string;\n value: unknown;\n};\n\n// Helper for traversing through a nested structure and outputting a list of\n// path->value entries of the leaves.\n//\n// For example, this yaml structure\n//\n// a: 1\n// b:\n// c: null\n// e: [f, g]\n// h:\n// - i: 1\n// j: k\n// - i: 2\n// j: l\n//\n// will result in\n//\n// \"a\", 1\n// \"b.c\", null\n// \"b.e\": \"f\"\n// \"b.e.f\": true\n// \"b.e\": \"g\"\n// \"b.e.g\": true\n// \"h.i\": 1\n// \"h.j\": \"k\"\n// \"h.i\": 2\n// \"h.j\": \"l\"\nexport function traverse(root: unknown): Kv[] {\n const output: Kv[] = [];\n\n function visit(path: string, current: unknown) {\n if (SPECIAL_KEYS.includes(path)) {\n return;\n }\n\n // empty or scalar\n if (\n current === undefined ||\n current === null ||\n ['string', 'number', 'boolean'].includes(typeof current)\n ) {\n output.push({ key: path, value: current });\n return;\n }\n\n // unknown\n if (typeof current !== 'object') {\n return;\n }\n\n // array\n if (Array.isArray(current)) {\n for (const item of current) {\n // NOTE(freben): The reason that these are output in two different ways,\n // is to support use cases where you want to express that MORE than one\n // tag is present in a list. Since the EntityFilters structure is a\n // record, you can't have several entries of the same key. Therefore\n // you will have to match on\n //\n // { \"a.b\": [\"true\"], \"a.c\": [\"true\"] }\n //\n // rather than\n //\n // { \"a\": [\"b\", \"c\"] }\n //\n // because the latter means EITHER b or c has to be present.\n visit(path, item);\n if (typeof item === 'string') {\n const pathKey = `${path}.${item}`;\n if (\n !output.some(\n kv =>\n kv.key.toLocaleLowerCase('en-US') ===\n pathKey.toLocaleLowerCase('en-US'),\n )\n ) {\n output.push({ key: pathKey, value: true });\n }\n }\n }\n return;\n }\n\n // object\n for (const [key, value] of Object.entries(current!)) {\n visit(path ? `${path}.${key}` : key, value);\n }\n }\n\n visit('', root);\n\n return output;\n}\n\n// Translates a number of raw data rows to search table rows\nexport function mapToRows(input: Kv[], entityId: string): DbSearchRow[] {\n const result: DbSearchRow[] = [];\n\n for (const { key: rawKey, value: rawValue } of input) {\n const key = rawKey.toLocaleLowerCase('en-US');\n if (key.length > MAX_KEY_LENGTH) {\n continue;\n }\n if (rawValue === undefined || rawValue === null) {\n result.push({\n entity_id: entityId,\n key,\n original_value: null,\n value: null,\n });\n } else {\n const value = String(rawValue).toLocaleLowerCase('en-US');\n if (value.length <= MAX_VALUE_LENGTH) {\n result.push({\n entity_id: entityId,\n key,\n original_value: String(rawValue),\n value: value,\n });\n } else {\n result.push({\n entity_id: entityId,\n key,\n original_value: null,\n value: null,\n });\n }\n }\n }\n\n return result;\n}\n\n/**\n * Generates all of the search rows that are relevant for this entity.\n *\n * @param entityId - The uid of the entity\n * @param entity - The entity\n * @returns A list of entity search rows\n */\nexport function buildEntitySearch(\n entityId: string,\n entity: Entity,\n): DbSearchRow[] {\n // Visit the base structure recursively\n const raw = traverse(entity);\n\n // Start with some special keys that are always present because you want to\n // be able to easily search for null specifically\n raw.push({ key: 'metadata.name', value: entity.metadata.name });\n raw.push({ key: 'metadata.namespace', value: entity.metadata.namespace });\n raw.push({ key: 'metadata.uid', value: entity.metadata.uid });\n\n // Namespace not specified has the default value \"default\", so we want to\n // match on that as well\n if (!entity.metadata.namespace) {\n raw.push({ key: 'metadata.namespace', value: DEFAULT_NAMESPACE });\n }\n\n // Visit relations\n for (const relation of entity.relations ?? []) {\n raw.push({\n key: `relations.${relation.type}`,\n value: relation.targetRef,\n });\n }\n\n // This validates that there are no keys that vary only in casing, such\n // as `spec.foo` and `spec.Foo`.\n const keys = new Set(raw.map(r => r.key));\n const lowerKeys = new Set(raw.map(r => r.key.toLocaleLowerCase('en-US')));\n if (keys.size !== lowerKeys.size) {\n const difference = [];\n for (const key of keys) {\n const lower = key.toLocaleLowerCase('en-US');\n if (!lowerKeys.delete(lower)) {\n difference.push(lower);\n }\n }\n const badKeys = `'${difference.join(\"', '\")}'`;\n throw new InputError(\n `Entity has duplicate keys that vary only in casing, ${badKeys}`,\n );\n }\n\n return mapToRows(raw, entityId);\n}\n"],"names":["DEFAULT_NAMESPACE","InputError"],"mappings":";;;;;AAuBA,MAAM,YAAe,GAAA;AAAA,EACnB,aAAA;AAAA,EACA,WAAA;AAAA,EACA,QAAA;AAAA,EACA,eAAA;AAAA,EACA,oBAAA;AAAA,EACA,cAAA;AAAA,EACA;AACF,CAAA;AAKA,MAAM,cAAiB,GAAA,GAAA;AACvB,MAAM,gBAAmB,GAAA,GAAA;AAkClB,SAAS,SAAS,IAAqB,EAAA;AAC5C,EAAA,MAAM,SAAe,EAAC;AAEtB,EAAS,SAAA,KAAA,CAAM,MAAc,OAAkB,EAAA;AAC7C,IAAI,IAAA,YAAA,CAAa,QAAS,CAAA,IAAI,CAAG,EAAA;AAC/B,MAAA;AAAA;AAIF,IAAA,IACE,OAAY,KAAA,KAAA,CAAA,IACZ,OAAY,KAAA,IAAA,IACZ,CAAC,QAAA,EAAU,QAAU,EAAA,SAAS,CAAE,CAAA,QAAA,CAAS,OAAO,OAAO,CACvD,EAAA;AACA,MAAA,MAAA,CAAO,KAAK,EAAE,GAAA,EAAK,IAAM,EAAA,KAAA,EAAO,SAAS,CAAA;AACzC,MAAA;AAAA;AAIF,IAAI,IAAA,OAAO,YAAY,QAAU,EAAA;AAC/B,MAAA;AAAA;AAIF,IAAI,IAAA,KAAA,CAAM,OAAQ,CAAA,OAAO,CAAG,EAAA;AAC1B,MAAA,KAAA,MAAW,QAAQ,OAAS,EAAA;AAc1B,QAAA,KAAA,CAAM,MAAM,IAAI,CAAA;AAChB,QAAI,IAAA,OAAO,SAAS,QAAU,EAAA;AAC5B,UAAA,MAAM,OAAU,GAAA,CAAA,EAAG,IAAI,CAAA,CAAA,EAAI,IAAI,CAAA,CAAA;AAC/B,UAAA,IACE,CAAC,MAAO,CAAA,IAAA;AAAA,YACN,CAAA,EAAA,KACE,GAAG,GAAI,CAAA,iBAAA,CAAkB,OAAO,CAChC,KAAA,OAAA,CAAQ,kBAAkB,OAAO;AAAA,WAErC,EAAA;AACA,YAAA,MAAA,CAAO,KAAK,EAAE,GAAA,EAAK,OAAS,EAAA,KAAA,EAAO,MAAM,CAAA;AAAA;AAC3C;AACF;AAEF,MAAA;AAAA;AAIF,IAAA,KAAA,MAAW,CAAC,GAAK,EAAA,KAAK,KAAK,MAAO,CAAA,OAAA,CAAQ,OAAQ,CAAG,EAAA;AACnD,MAAA,KAAA,CAAM,OAAO,CAAG,EAAA,IAAI,IAAI,GAAG,CAAA,CAAA,GAAK,KAAK,KAAK,CAAA;AAAA;AAC5C;AAGF,EAAA,KAAA,CAAM,IAAI,IAAI,CAAA;AAEd,EAAO,OAAA,MAAA;AACT;AAGgB,SAAA,SAAA,CAAU,OAAa,QAAiC,EAAA;AACtE,EAAA,MAAM,SAAwB,EAAC;AAE/B,EAAA,KAAA,MAAW,EAAE,GAAK,EAAA,MAAA,EAAQ,KAAO,EAAA,QAAA,MAAc,KAAO,EAAA;AACpD,IAAM,MAAA,GAAA,GAAM,MAAO,CAAA,iBAAA,CAAkB,OAAO,CAAA;AAC5C,IAAI,IAAA,GAAA,CAAI,SAAS,cAAgB,EAAA;AAC/B,MAAA;AAAA;AAEF,IAAI,IAAA,QAAA,KAAa,KAAa,CAAA,IAAA,QAAA,KAAa,IAAM,EAAA;AAC/C,MAAA,MAAA,CAAO,IAAK,CAAA;AAAA,QACV,SAAW,EAAA,QAAA;AAAA,QACX,GAAA;AAAA,QACA,cAAgB,EAAA,IAAA;AAAA,QAChB,KAAO,EAAA;AAAA,OACR,CAAA;AAAA,KACI,MAAA;AACL,MAAA,MAAM,KAAQ,GAAA,MAAA,CAAO,QAAQ,CAAA,CAAE,kBAAkB,OAAO,CAAA;AACxD,MAAI,IAAA,KAAA,CAAM,UAAU,gBAAkB,EAAA;AACpC,QAAA,MAAA,CAAO,IAAK,CAAA;AAAA,UACV,SAAW,EAAA,QAAA;AAAA,UACX,GAAA;AAAA,UACA,cAAA,EAAgB,OAAO,QAAQ,CAAA;AAAA,UAC/B;AAAA,SACD,CAAA;AAAA,OACI,MAAA;AACL,QAAA,MAAA,CAAO,IAAK,CAAA;AAAA,UACV,SAAW,EAAA,QAAA;AAAA,UACX,GAAA;AAAA,UACA,cAAgB,EAAA,IAAA;AAAA,UAChB,KAAO,EAAA;AAAA,SACR,CAAA;AAAA;AACH;AACF;AAGF,EAAO,OAAA,MAAA;AACT;AASgB,SAAA,iBAAA,CACd,UACA,MACe,EAAA;AAEf,EAAM,MAAA,GAAA,GAAM,SAAS,MAAM,CAAA;AAI3B,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,eAAA,EAAiB,OAAO,MAAO,CAAA,QAAA,CAAS,MAAM,CAAA;AAC9D,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,oBAAA,EAAsB,OAAO,MAAO,CAAA,QAAA,CAAS,WAAW,CAAA;AACxE,EAAI,GAAA,CAAA,IAAA,CAAK,EAAE,GAAK,EAAA,cAAA,EAAgB,OAAO,MAAO,CAAA,QAAA,CAAS,KAAK,CAAA;AAI5D,EAAI,IAAA,CAAC,MAAO,CAAA,QAAA,CAAS,SAAW,EAAA;AAC9B,IAAA,GAAA,CAAI,KAAK,EAAE,GAAA,EAAK,oBAAsB,EAAA,KAAA,EAAOA,gCAAmB,CAAA;AAAA;AAIlE,EAAA,KAAA,MAAW,QAAY,IAAA,MAAA,CAAO,SAAa,IAAA,EAAI,EAAA;AAC7C,IAAA,GAAA,CAAI,IAAK,CAAA;AAAA,MACP,GAAA,EAAK,CAAa,UAAA,EAAA,QAAA,CAAS,IAAI,CAAA,CAAA;AAAA,MAC/B,OAAO,QAAS,CAAA;AAAA,KACjB,CAAA;AAAA;AAKH,EAAM,MAAA,IAAA,GAAO,IAAI,GAAI,CAAA,GAAA,CAAI,IAAI,CAAK,CAAA,KAAA,CAAA,CAAE,GAAG,CAAC,CAAA;AACxC,EAAM,MAAA,SAAA,GAAY,IAAI,GAAA,CAAI,GAAI,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,GAAI,CAAA,iBAAA,CAAkB,OAAO,CAAC,CAAC,CAAA;AACxE,EAAI,IAAA,IAAA,CAAK,IAAS,KAAA,SAAA,CAAU,IAAM,EAAA;AAChC,IAAA,MAAM,aAAa,EAAC;AACpB,IAAA,KAAA,MAAW,OAAO,IAAM,EAAA;AACtB,MAAM,MAAA,KAAA,GAAQ,GAAI,CAAA,iBAAA,CAAkB,OAAO,CAAA;AAC3C,MAAA,IAAI,CAAC,SAAA,CAAU,MAAO,CAAA,KAAK,CAAG,EAAA;AAC5B,QAAA,UAAA,CAAW,KAAK,KAAK,CAAA;AAAA;AACvB;AAEF,IAAA,MAAM,OAAU,GAAA,CAAA,CAAA,EAAI,UAAW,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,CAAA,CAAA;AAC3C,IAAA,MAAM,IAAIC,iBAAA;AAAA,MACR,uDAAuD,OAAO,CAAA;AAAA,KAChE;AAAA;AAGF,EAAO,OAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AAChC;;;;;;"}

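The `sourcesContent` embedded in the map above carries the updated `buildEntitySearch.ts`, where the duplicate-path-key fix lives in the array branch of `traverse`: membership keys of the form `<path>.<item>` are now deduplicated case-insensitively before being pushed. A standalone sketch of just that rule; `flattenTags` is a hypothetical helper for illustration, not a package export:

```ts
// Sketch of the traversal rule shown in the embedded source above: array
// items are emitted both as values and as `<path>.<item>` keys, and (per
// this release's fix) the boolean membership keys are deduplicated
// case-insensitively before insertion.
type Kv = { key: string; value: unknown };

function flattenTags(path: string, items: string[]): Kv[] {
  const output: Kv[] = [];
  for (const item of items) {
    output.push({ key: path, value: item });
    const pathKey = `${path}.${item}`;
    const exists = output.some(
      kv =>
        kv.key.toLocaleLowerCase('en-US') ===
        pathKey.toLocaleLowerCase('en-US'),
    );
    if (!exists) {
      output.push({ key: pathKey, value: true });
    }
  }
  return output;
}

// flattenTags('metadata.tags', ['Java', 'java']) yields
//   { key: 'metadata.tags', value: 'Java' }
//   { key: 'metadata.tags.Java', value: true }
//   { key: 'metadata.tags', value: 'java' }
// and no second 'metadata.tags.java' boolean row, so the duplicate-casing
// validation later in buildEntitySearch no longer throws for such input.
```
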
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,3 @@
-/// <reference types="node" />
 import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
 import { LoggerService, UrlReaderService, DiscoveryService, DatabaseService, RootConfigService, PermissionsService, PermissionsRegistryService, SchedulerService, AuthService, HttpAuthService, AuditorService } from '@backstage/backend-plugin-api';
 import { Entity, EntityPolicy, Validators } from '@backstage/catalog-model';
@@ -460,8 +459,8 @@ declare const processingResult: Readonly<{
     readonly generalError: (atLocation: LocationSpec$1, message: string) => CatalogProcessorResult$1;
     readonly location: (newLocation: LocationSpec$1) => CatalogProcessorResult$1;
     readonly entity: (atLocation: LocationSpec$1, newEntity: Entity, options?: {
-        locationKey?: string | null
-    }
+        locationKey?: string | null;
+    }) => CatalogProcessorResult$1;
     readonly relation: (spec: EntityRelationSpec$1) => CatalogProcessorResult$1;
     readonly refresh: (key: string) => CatalogProcessorResult$1;
 }>;
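
The second hunk above repairs the generated declaration of `processingResult.entity`: the optional `options` parameter previously truncated the signature, and it is now a complete function type returning `CatalogProcessorResult$1`. A hedged sketch of a custom processor exercising that overload; the processor itself and its location handling are made up for illustration:

```ts
import { Entity } from '@backstage/catalog-model';
import { processingResult } from '@backstage/plugin-catalog-backend';
import {
  CatalogProcessor,
  CatalogProcessorEmit,
} from '@backstage/plugin-catalog-node';
import { LocationSpec } from '@backstage/plugin-catalog-common';

// Hypothetical processor that emits one hardcoded entity, passing the
// optional third argument whose declaration this release corrects.
export class ExampleEmitProcessor implements CatalogProcessor {
  getProcessorName(): string {
    return 'ExampleEmitProcessor';
  }

  async readLocation(
    location: LocationSpec,
    _optional: boolean,
    emit: CatalogProcessorEmit,
  ): Promise<boolean> {
    if (location.type !== 'example') {
      return false;
    }
    const entity: Entity = {
      apiVersion: 'backstage.io/v1alpha1',
      kind: 'Component',
      metadata: { name: 'example' },
      spec: { type: 'service', lifecycle: 'production', owner: 'guests' },
    };
    // With the fixed declaration this call is typed as returning a
    // CatalogProcessorResult even when the options object is supplied.
    emit(
      processingResult.entity(location, entity, {
        locationKey: location.target,
      }),
    );
    return true;
  }
}
```
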

package/dist/processing/evictEntitiesFromOrphanedProviders.cjs.js
ADDED

@@ -0,0 +1,47 @@
+'use strict';
+
+async function getOrphanedEntityProviderNames({
+  db,
+  providers
+}) {
+  const dbProviderNames = await db.transaction(
+    async (tx) => db.listReferenceSourceKeys(tx)
+  );
+  const providerNames = providers.map((p) => p.getProviderName());
+  return dbProviderNames.filter(
+    (dbProviderName) => !providerNames.includes(dbProviderName)
+  );
+}
+async function removeEntitiesForProvider({
+  db,
+  providerName,
+  logger
+}) {
+  try {
+    await db.transaction(async (tx) => {
+      await db.replaceUnprocessedEntities(tx, {
+        sourceKey: providerName,
+        type: "full",
+        items: []
+      });
+    });
+    logger.info(`Removed entities for orphaned provider ${providerName}`);
+  } catch (e) {
+    logger.error(
+      `Failed to remove entities for orphaned provider ${providerName}`,
+      e
+    );
+  }
+}
+async function evictEntitiesFromOrphanedProviders(options) {
+  for (const providerName of await getOrphanedEntityProviderNames(options)) {
+    await removeEntitiesForProvider({
+      db: options.db,
+      providerName,
+      logger: options.logger
+    });
+  }
+}
+
+exports.evictEntitiesFromOrphanedProviders = evictEntitiesFromOrphanedProviders;
+//# sourceMappingURL=evictEntitiesFromOrphanedProviders.cjs.js.map
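
In sum, the new module collects every source key still recorded in the database, drops the ones that match a currently registered provider name, and issues an empty `full` replace for each remaining orphan so that the regular processing pipeline sweeps its entities out. A minimal sketch of calling it directly; the module is internal rather than a public export, and the stub shapes below are assumptions standing in for the real `ProviderDatabase` and logger:

```ts
import { evictEntitiesFromOrphanedProviders } from './evictEntitiesFromOrphanedProviders';

// Stubbed ProviderDatabase: one recorded source key no longer has a
// matching provider and should therefore be evicted.
const db = {
  async transaction<T>(fn: (tx: unknown) => Promise<T>): Promise<T> {
    return fn({});
  },
  async listReferenceSourceKeys(_tx: unknown): Promise<string[]> {
    return ['github-org:old', 'github-org:current'];
  },
  async replaceUnprocessedEntities(_tx: unknown, options: unknown) {
    console.log('empty full mutation ->', options);
  },
};

await evictEntitiesFromOrphanedProviders({
  db: db as any, // stands in for the internal ProviderDatabase type
  providers: [{ getProviderName: () => 'github-org:current' } as any],
  logger: console as any,
});
// Only 'github-org:old' is orphaned, so exactly one
// { sourceKey: 'github-org:old', type: 'full', items: [] } replace runs,
// and the processing pipeline then removes that provider's entities.
```
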

package/dist/processing/evictEntitiesFromOrphanedProviders.cjs.js.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"evictEntitiesFromOrphanedProviders.cjs.js","sources":["../../src/processing/evictEntitiesFromOrphanedProviders.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { EntityProvider } from '@backstage/plugin-catalog-node';\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { ProviderDatabase } from '../database/types';\n\nasync function getOrphanedEntityProviderNames({\n db,\n providers,\n}: {\n db: ProviderDatabase;\n providers: EntityProvider[];\n}): Promise<string[]> {\n const dbProviderNames = await db.transaction(async tx =>\n db.listReferenceSourceKeys(tx),\n );\n\n const providerNames = providers.map(p => p.getProviderName());\n\n return dbProviderNames.filter(\n dbProviderName => !providerNames.includes(dbProviderName),\n );\n}\n\nasync function removeEntitiesForProvider({\n db,\n providerName,\n logger,\n}: {\n db: ProviderDatabase;\n providerName: string;\n logger: LoggerService;\n}) {\n try {\n await db.transaction(async tx => {\n await db.replaceUnprocessedEntities(tx, {\n sourceKey: providerName,\n type: 'full',\n items: [],\n });\n });\n logger.info(`Removed entities for orphaned provider ${providerName}`);\n } catch (e) {\n logger.error(\n `Failed to remove entities for orphaned provider ${providerName}`,\n e,\n );\n }\n}\n\nexport async function evictEntitiesFromOrphanedProviders(options: {\n db: ProviderDatabase;\n providers: EntityProvider[];\n logger: LoggerService;\n}) {\n for (const providerName of await getOrphanedEntityProviderNames(options)) {\n await removeEntitiesForProvider({\n db: options.db,\n providerName,\n logger: options.logger,\n });\n }\n}\n"],"names":[],"mappings":";;AAoBA,eAAe,8BAA+B,CAAA;AAAA,EAC5C,EAAA;AAAA,EACA;AACF,CAGsB,EAAA;AACpB,EAAM,MAAA,eAAA,GAAkB,MAAM,EAAG,CAAA,WAAA;AAAA,IAAY,OAAM,EAAA,KACjD,EAAG,CAAA,uBAAA,CAAwB,EAAE;AAAA,GAC/B;AAEA,EAAA,MAAM,gBAAgB,SAAU,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,iBAAiB,CAAA;AAE5D,EAAA,OAAO,eAAgB,CAAA,MAAA;AAAA,IACrB,CAAkB,cAAA,KAAA,CAAC,aAAc,CAAA,QAAA,CAAS,cAAc;AAAA,GAC1D;AACF;AAEA,eAAe,yBAA0B,CAAA;AAAA,EACvC,EAAA;AAAA,EACA,YAAA;AAAA,EACA;AACF,CAIG,EAAA;AACD,EAAI,IAAA;AACF,IAAM,MAAA,EAAA,CAAG,WAAY,CAAA,OAAM,EAAM,KAAA;AAC/B,MAAM,MAAA,EAAA,CAAG,2BAA2B,EAAI,EAAA;AAAA,QACtC,SAAW,EAAA,YAAA;AAAA,QACX,IAAM,EAAA,MAAA;AAAA,QACN,OAAO;AAAC,OACT,CAAA;AAAA,KACF,CAAA;AACD,IAAO,MAAA,CAAA,IAAA,CAAK,CAA0C,uCAAA,EAAA,YAAY,CAAE,CAAA,CAAA;AAAA,WAC7D,CAAG,EAAA;AACV,IAAO,MAAA,CAAA,KAAA;AAAA,MACL,mDAAmD,YAAY,CAAA,CAAA;AAAA,MAC/D;AAAA,KACF;AAAA;AAEJ;AAEA,eAAsB,mCAAmC,OAItD,EAAA;AACD,EAAA,KAAA,MAAW,YAAgB,IAAA,MAAM,8BAA+B,CAAA,OAAO,CAAG,EAAA;AACxE,IAAA,MAAM,yBAA0B,CAAA;AAAA,MAC9B,IAAI,OAAQ,CAAA,EAAA;AAAA,MACZ,YAAA;AAAA,MACA,QAAQ,OAAQ,CAAA;AAAA,KACjB,CAAA;AAAA;AAEL;;;;"}

package/dist/schema/openapi/generated/router.cjs.js
CHANGED

@@ -7,7 +7,7 @@ const spec = {
   info: {
     title: "catalog",
     version: "1",
-    description: "The
+    description: "The API surface consists of a few distinct groups of functionality. Each has a\ndedicated section below.\n\n> **Note:** This page only describes some of the most commonly used parts of the\n> API, and is a work in progress.\n\nAll of the URL paths in this article are assumed to be on top of some base URL\npointing at your catalog installation. For example, if the path given in a\nsection below is `/entities`, and the catalog is located at\n`http://localhost:7007/api/catalog` during local development, the full URL would\nbe `http://localhost:7007/api/catalog/entities`. The actual URL may vary from\none organization to the other, especially in production, but is commonly your\n`backend.baseUrl` in your app config, plus `/api/catalog` at the end.\n\nSome or all of the endpoints may accept or require an `Authorization` header\nwith a `Bearer` token, which should then be the Backstage token returned by the\n[`identity API`](https://backstage.io/docs/reference/core-plugin-api.identityapiref).\n",
     license: {
       name: "Apache-2.0",
       url: "http://www.apache.org/licenses/LICENSE-2.0.html"
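
The rewritten `info.description` explains how full endpoint URLs are composed. A tiny sketch under those assumptions; the base URL is the local-dev default and the token, when required, comes from the identity API:

```ts
// Assumed local-dev values; real deployments derive the base URL from
// backend.baseUrl in app-config and obtain the token via the identity API.
declare const backstageToken: string;

const backendBaseUrl = 'http://localhost:7007';
const catalogUrl = `${backendBaseUrl}/api/catalog`;

const res = await fetch(`${catalogUrl}/entities`, {
  headers: { Authorization: `Bearer ${backstageToken}` },
});
const entities = await res.json();
```
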
@@ -62,7 +62,7 @@ const spec = {
       cursor: {
         name: "cursor",
         in: "query",
-        description: "
+        description: 'You may pass the `cursor` query parameters to perform cursor based pagination\nthrough the set of entities. The value of `cursor` will be returned in the response, under the `pageInfo` property:\n\n```json\n "pageInfo": {\n "nextCursor": "a-cursor",\n "prevCursor": "another-cursor"\n }\n```\n\nIf `nextCursor` exists, it can be used to retrieve the next batch of entities. Following the same approach,\nif `prevCursor` exists, it can be used to retrieve the previous batch of entities.\n\n- [`filter`](#filtering), for selecting only a subset of all entities\n- [`fields`](#field-selection), for selecting only parts of the full data\n structure of each entity\n- `limit` for limiting the number of entities returned (20 is the default)\n- [`orderField`](#ordering), for deciding the order of the entities\n- `fullTextFilter`\n **NOTE**: [`filter`, `orderField`, `fullTextFilter`] and `cursor` are mutually exclusive. This means that,\nit isn\'t possible to change any of [`filter`, `orderField`, `fullTextFilter`] when passing `cursor` as query parameters,\nas changing any of these properties will affect pagination. If any of `filter`, `orderField`, `fullTextFilter` is specified together with `cursor`, only the latter is taken into consideration.\n',
         required: false,
         allowReserved: true,
         schema: {
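
The new `cursor` description documents cursor-based pagination via `pageInfo`. A small client sketch that follows `nextCursor` until the server stops returning one; the base URL and the untyped response handling are assumptions consistent with the description:

```ts
const baseUrl = 'http://localhost:7007/api/catalog'; // assumption

// Stream every entity by chasing pageInfo.nextCursor. Note that filter,
// orderField and fullTextFilter must not be changed between pages.
async function* allEntities(): AsyncGenerator<unknown> {
  let cursor: string | undefined;
  do {
    const params = new URLSearchParams({ limit: '100' });
    if (cursor) params.set('cursor', cursor);
    const res = await fetch(`${baseUrl}/entities/by-query?${params}`);
    const { items, pageInfo } = await res.json();
    yield* items;
    cursor = pageInfo?.nextCursor;
  } while (cursor);
}

for await (const entity of allEntities()) {
  // ...process each entity
}
```
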
@@ -84,7 +84,7 @@ const spec = {
       fields: {
         name: "fields",
         in: "query",
-        description: "
+        description: "By default the full entities are returned, but you can pass in a `fields` query\nparameter which selects what parts of the entity data to retain. This makes the\nresponse smaller and faster to transfer, and may allow the catalog to perform\nmore efficient queries.\n\nThe query parameter value is a comma separated list of simplified JSON paths\nlike above. Each path corresponds to the key of either a value, or of a subtree\nroot that you want to keep in the output. The rest is pruned away. For example,\nspecifying `?fields=metadata.name,metadata.annotations,spec` retains only the\n`name` and `annotations` fields of the `metadata` of each entity (it'll be an\nobject with at most two keys), keeps the entire `spec` unchanged, and cuts out\nall other roots such as `relations`.\n\nSome more real world usable examples:\n\n- Return only enough data to form the full ref of each entity:\n\n `/entities/by-query?fields=kind,metadata.namespace,metadata.name`\n",
         required: false,
         allowReserved: true,
         explode: false,
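
A sketch of the field-selection example from the description above, trimming each entity down to exactly what `stringifyEntityRef` from `@backstage/catalog-model` needs; the base URL is again an assumption:

```ts
import { stringifyEntityRef } from '@backstage/catalog-model';

const baseUrl = 'http://localhost:7007/api/catalog'; // assumption

const res = await fetch(
  `${baseUrl}/entities/by-query?fields=kind,metadata.namespace,metadata.name`,
);
const { items } = await res.json();
// Each pruned item still carries kind and metadata.name(/namespace),
// which is all that is needed to form a full entity ref.
const refs = items.map((e: any) => stringifyEntityRef(e));
// e.g. ['component:default/example', 'api:default/petstore']
```
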
@@ -106,7 +106,7 @@ const spec = {
       filter: {
         name: "filter",
         in: "query",
-        description:
+        description: 'You can pass in one or more filter sets that get matched against each entity.\nEach filter set is a number of conditions that all have to match for the\ncondition to be true (conditions effectively have an AND between them). At least\none filter set has to be true for the entity to be part of the result set\n(filter sets effectively have an OR between them).\n\nExample:\n\n```text\n/entities/by-query?filter=kind=user,metadata.namespace=default&filter=kind=group,spec.type\n\n Return entities that match\n\n Filter set 1:\n Condition 1: kind = user\n AND\n Condition 2: metadata.namespace = default\n\n OR\n\n Filter set 2:\n Condition 1: kind = group\n AND\n Condition 2: spec.type exists\n```\n\nEach condition is either on the form `<key>`, or on the form `<key>=<value>`.\nThe first form asserts on the existence of a certain key (with any value), and\nthe second asserts that the key exists and has a certain value. All checks are\nalways case _insensitive_.\n\nIn all cases, the key is a simplified JSON path in a given piece of entity data.\nEach part of the path is a key of an object, and the traversal also descends\nthrough arrays. There are two special forms:\n\n- Array items that are simple value types (such as strings) match on a key-value\n pair where the key is the item as a string, and the value is the string `true`\n- Relations can be matched on a `relations.<type>=<targetRef>` form\n\nLet\'s look at a simplified example to illustrate the concept:\n\n```json\n{\n "a": {\n "b": ["c", { "d": 1 }],\n "e": 7\n }\n}\n```\n\nThis would match any one of the following conditions:\n\n- `a`\n- `a.b`\n- `a.b.c`\n- `a.b.c=true`\n- `a.b.d`\n- `a.b.d=1`\n- `a.e`\n- `a.e=7`\n\nSome more real world usable examples:\n\n- Return all orphaned entities:\n\n `/entities/by-query?filter=metadata.annotations.backstage.io/orphan=true`\n\n- Return all users and groups:\n\n `/entities/by-query?filter=kind=user&filter=kind=group`\n\n- Return all service components:\n\n `/entities/by-query?filter=kind=component,spec.type=service`\n\n- Return all entities with the `java` tag:\n\n `/entities/by-query?filter=metadata.tags.java`\n\n- Return all users who are members of the `ops` group (note that the full\n [reference](references.md) of the group is used):\n\n `/entities/by-query?filter=kind=user,relations.memberof=group:default/ops`\n',
         required: false,
         allowReserved: true,
         schema: {
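
A sketch composing two filter sets per the description above: repeated `filter` parameters OR together, while the comma-joined conditions inside a single parameter AND together. The base URL is an assumption:

```ts
const baseUrl = 'http://localhost:7007/api/catalog'; // assumption

// All service components, plus all API entities.
const url =
  `${baseUrl}/entities/by-query` +
  `?filter=${encodeURIComponent('kind=component,spec.type=service')}` +
  `&filter=${encodeURIComponent('kind=api')}`;

const { items } = await (await fetch(url)).json();
```
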
@@ -151,7 +151,7 @@ const spec = {
       orderField: {
         name: "orderField",
         in: "query",
-        description: "
+        description: "By default the entities are returned ordered by their internal uid. You can\ncustomize the `orderField` query parameters to affect that ordering.\n\nFor example, to return entities by their name:\n\n`/entities/by-query?orderField=metadata.name,asc`\n\nEach parameter can be followed by `asc` for ascending lexicographical order or\n`desc` for descending (reverse) lexicographical order.\n",
         required: false,
         allowReserved: true,
         schema: {
@@ -736,6 +736,7 @@ const spec = {
     "/refresh": {
       post: {
         operationId: "RefreshEntity",
+        tags: ["Entity"],
         description: "Refresh the entity related to entityRef.",
         responses: {
           "200": {
@@ -782,6 +783,7 @@ const spec = {
     "/entities": {
       get: {
         operationId: "GetEntities",
+        tags: ["Entity"],
         description: "Get all entities matching a given filter.",
         responses: {
           "200": {
@@ -844,6 +846,7 @@ const spec = {
     "/entities/by-uid/{uid}": {
       get: {
         operationId: "GetEntityByUid",
+        tags: ["Entity"],
         description: "Get a single entity by the UID.",
         responses: {
           "200": {
@@ -877,6 +880,7 @@ const spec = {
       },
       delete: {
         operationId: "DeleteEntityByUid",
+        tags: ["Entity"],
         description: "Delete a single entity by UID.",
         responses: {
           "204": {
@@ -905,6 +909,7 @@ const spec = {
     "/entities/by-name/{kind}/{namespace}/{name}": {
       get: {
         operationId: "GetEntityByName",
+        tags: ["Entity"],
         description: "Get an entity by an entity ref.",
         responses: {
           "200": {
@@ -946,6 +951,7 @@ const spec = {
     "/entities/by-name/{kind}/{namespace}/{name}/ancestry": {
       get: {
         operationId: "GetEntityAncestryByName",
+        tags: ["Entity"],
         description: "Get an entity's ancestry by entity ref.",
         responses: {
           "200": {
@@ -987,6 +993,7 @@ const spec = {
     "/entities/by-refs": {
       post: {
         operationId: "GetEntitiesByRefs",
+        tags: ["Entity"],
         description: "Get a batch set of entities given an array of entityRefs.",
         responses: {
           "200": {
@@ -1063,6 +1070,7 @@ const spec = {
     "/entities/by-query": {
       get: {
         operationId: "GetEntitiesByQuery",
+        tags: ["Entity"],
         description: "Search for entities by a given query.",
         responses: {
           "200": {
@@ -1138,6 +1146,7 @@ const spec = {
     "/entity-facets": {
       get: {
         operationId: "GetEntityFacets",
+        tags: ["Entity"],
         description: "Get all entity facets that match the given filters.",
         responses: {
           "200": {
@@ -1193,6 +1202,7 @@ const spec = {
     "/locations": {
       post: {
         operationId: "CreateLocation",
+        tags: ["Locations"],
         description: "Create a location for a given target.",
         responses: {
           "201": {
@@ -1266,6 +1276,7 @@ const spec = {
       },
       get: {
         operationId: "GetLocations",
+        tags: ["Locations"],
         description: "Get all locations",
         responses: {
           "200": {
@@ -1303,6 +1314,7 @@ const spec = {
     "/locations/{id}": {
       get: {
         operationId: "GetLocation",
+        tags: ["Locations"],
         description: "Get a location by id.",
         responses: {
           "200": {
@@ -1339,6 +1351,7 @@ const spec = {
       },
       delete: {
         operationId: "DeleteLocation",
+        tags: ["Locations"],
         description: "Delete a location by id.",
         responses: {
           "204": {
@@ -1373,6 +1386,7 @@ const spec = {
     "/locations/by-entity/{kind}/{namespace}/{name}": {
       get: {
         operationId: "getLocationByEntity",
+        tags: ["Locations"],
         description: "Get a location for entity.",
         responses: {
           "200": {
@@ -1429,6 +1443,7 @@ const spec = {
     "/analyze-location": {
       post: {
         operationId: "AnalyzeLocation",
+        tags: ["Locations"],
         description: "Validate a given location.",
         responses: {
           "200": {
@@ -1479,6 +1494,7 @@ const spec = {
     "/validate-entity": {
       post: {
         operationId: "ValidateEntity",
+        tags: ["Entity"],
         description: "Validate that a passed in entity has no errors in schema.",
         responses: {
           "200": {