@backstage/plugin-catalog-backend 1.30.0 → 1.31.0-next.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,33 @@
 # @backstage/plugin-catalog-backend

+ ## 1.31.0-next.0
+
+ ### Minor Changes
+
+ - 8805f93: The catalog backend now supports the new `PermissionsRegistryService`, which can be used to add custom permission rules.
+ - a4aa244: This change integrates the `auditor` service into the Catalog plugin.
+
+ ### Patch Changes
+
+ - c9139e1: Ignore benign database conflict errors during stitching; they are now logged at debug level instead.
+ - f178b12: Clean up `refresh_state_references` rows for entity processors and providers that are no longer in control of a `refresh_state` row for an entity
+ - Updated dependencies
+   - @backstage/plugin-permission-node@0.8.8-next.0
+   - @backstage/plugin-catalog-node@1.15.2-next.0
+   - @backstage/backend-plugin-api@1.2.0-next.0
+   - @backstage/plugin-search-backend-module-catalog@0.3.1-next.0
+   - @backstage/backend-openapi-utils@0.4.2-next.0
+   - @backstage/catalog-client@1.9.1
+   - @backstage/catalog-model@1.7.3
+   - @backstage/config@1.3.2
+   - @backstage/errors@1.2.7
+   - @backstage/integration@1.16.1
+   - @backstage/types@1.2.1
+   - @backstage/plugin-catalog-common@1.1.3
+   - @backstage/plugin-events-node@0.4.8-next.0
+   - @backstage/plugin-permission-common@0.8.4
+   - @backstage/plugin-search-common@1.2.17
+
 ## 1.30.0

 ### Patch Changes
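The 8805f93 entry refers to the permissions registry service that arrives with the `@backstage/backend-plugin-api` 1.2.0 bump listed above. Below is a minimal sketch of registering a custom catalog permission rule through it. It assumes the service ref is exposed as `coreServices.permissionsRegistry` and that `createCatalogPermissionRule` is available from the plugin's alpha exports; the rule name and annotation check are illustrative only:

```ts
import { createBackendModule, coreServices } from '@backstage/backend-plugin-api';
import { createCatalogPermissionRule } from '@backstage/plugin-catalog-backend/alpha';
import { RESOURCE_TYPE_CATALOG_ENTITY } from '@backstage/plugin-catalog-common/alpha';
import { z } from 'zod';

// Illustrative rule: matches entities that carry a given annotation.
const hasAnnotation = createCatalogPermissionRule({
  name: 'HAS_ANNOTATION',
  description: 'Allow entities carrying the given annotation',
  resourceType: RESOURCE_TYPE_CATALOG_ENTITY,
  paramsSchema: z.object({ annotation: z.string() }),
  apply: (entity, { annotation }) =>
    Boolean(entity.metadata.annotations?.[annotation]),
  // Key-only filter: matches entities where the annotation key exists.
  toQuery: ({ annotation }) => ({ key: `metadata.annotations.${annotation}` }),
});

// Register the rule from a catalog backend module via the new service
// (assumed method name: addPermissionRules).
export default createBackendModule({
  pluginId: 'catalog',
  moduleId: 'example-permission-rules',
  register(reg) {
    reg.registerInit({
      deps: { permissionsRegistry: coreServices.permissionsRegistry },
      async init({ permissionsRegistry }) {
        permissionsRegistry.addPermissionRules([hasAnnotation]);
      },
    });
  },
});
```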
package/README.md CHANGED
@@ -87,6 +87,51 @@ yarn start
 This will launch both frontend and backend in the same window, populated with
 some example entities.

+ ## Audit Events
+
+ The Catalog backend emits audit events for various operations. Events are grouped logically by `eventId`, with `subEventId` providing further distinction within an operation group.
+
+ **Entity Events:**
+
+ - **`entity-fetch`**: Retrieves entities.
+
+   Filter on `queryType`.
+
+   - **`all`**: Fetching all entities. (GET `/entities`)
+   - **`by-id`**: Fetching a single entity using its UID. (GET `/entities/by-uid/:uid`)
+   - **`by-name`**: Fetching a single entity using its kind, namespace, and name. (GET `/entities/by-name/:kind/:namespace/:name`)
+   - **`by-query`**: Fetching multiple entities using a filter query. (GET `/entities/by-query`)
+   - **`by-refs`**: Fetching a batch of entities by their entity refs. (POST `/entities/by-refs`)
+   - **`ancestry`**: Fetching the ancestry of an entity. (GET `/entities/by-name/:kind/:namespace/:name/ancestry`)
+
+ - **`entity-mutate`**: Modifies entities.
+
+   Filter on `actionType`.
+
+   - **`delete`**: Deleting a single entity. Note: this is not a permanent deletion; the entity will be restored if the parent location is still present in the catalog. (DELETE `/entities/by-uid/:uid`)
+   - **`refresh`**: Scheduling an entity refresh. (POST `/entities/refresh`)
+
+ - **`entity-validate`**: Validates an entity. (POST `/entities/validate`)
+
+ - **`entity-facets`**: Retrieves entity facets. (GET `/entity-facets`)
+
+ **Location Events:**
+
+ - **`location-fetch`**: Retrieves locations.
+
+   Filter on `actionType`.
+
+   - **`all`**: Fetching all locations. (GET `/locations`)
+   - **`by-id`**: Fetching a single location by ID. (GET `/locations/:id`)
+   - **`by-entity`**: Fetching locations associated with an entity ref. (GET `/locations/by-entity`)
+
+ - **`location-mutate`**: Modifies locations.
+
+   - **`create`**: Creating a new location. (POST `/locations`)
+   - **`delete`**: Deleting a location and its associated entities. (DELETE `/locations/:id`)
+
+ - **`location-analyze`**: Analyzes a location. (POST `/locations/analyze`)
+
 ## Links

 - [catalog](https://github.com/backstage/backstage/tree/master/plugins/catalog)
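The new README section above describes the grouping fields but not a consumption pattern, so here is a small sketch of filtering decoded audit events by `eventId` and the per-group discriminator. The event shape below is an assumption for illustration only, not the auditor service's documented schema:

```ts
// Assumed, simplified shape of a decoded catalog audit event.
interface CatalogAuditEvent {
  eventId: string; // e.g. 'entity-fetch', 'location-mutate'
  meta?: {
    queryType?: string; // discriminator for entity-fetch events
    actionType?: string; // discriminator for mutate and location events
  };
}

// Keep only filter-query reads, i.e. GET /entities/by-query traffic.
const isEntityFetchByQuery = (event: CatalogAuditEvent): boolean =>
  event.eventId === 'entity-fetch' && event.meta?.queryType === 'by-query';

const events: CatalogAuditEvent[] = [
  { eventId: 'entity-fetch', meta: { queryType: 'by-query' } },
  { eventId: 'entity-mutate', meta: { actionType: 'delete' } },
];
console.log(events.filter(isEntityFetchByQuery)); // -> the first event only
```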
@@ -237,7 +237,7 @@ class DefaultProcessingDatabase {
 }
 }
 }
- await tx("refresh_state_references").andWhere({ source_entity_ref: options.sourceEntityRef }).delete();
+ await tx("refresh_state_references").where({ source_entity_ref: options.sourceEntityRef }).orWhereIn("target_entity_ref", stateReferences).delete();
 await tx.batchInsert(
 "refresh_state_references",
 stateReferences.map((entityRef) => ({
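This hunk is the f178b12 cleanup from the changelog: the delete now removes not only the references originating from the processed entity, but also any stale references still targeting the entities about to be re-registered. A sketch of the new predicate using knex's query builder offline, with illustrative entity refs; no database connection is needed just to render the SQL:

```ts
import { knex as knexFactory } from 'knex';

// Query-builder-only instance, used purely to compile SQL text.
const knex = knexFactory({ client: 'pg' });

const sourceEntityRef = 'component:default/parent';
const stateReferences = ['component:default/child-a', 'component:default/child-b'];

const sql = knex('refresh_state_references')
  .where({ source_entity_ref: sourceEntityRef })
  .orWhereIn('target_entity_ref', stateReferences)
  .delete()
  .toString();

console.log(sql);
// Roughly: delete from "refresh_state_references"
//   where "source_entity_ref" = 'component:default/parent'
//   or "target_entity_ref" in ('component:default/child-a', 'component:default/child-b')
```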
@@ -1 +1 @@
- {"version":3,"file":"DefaultProcessingDatabase.cjs.js","sources":["../../src/database/DefaultProcessingDatabase.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { ConflictError } from '@backstage/errors';\nimport { DeferredEntity } from '@backstage/plugin-catalog-node';\nimport { Knex } from 'knex';\nimport lodash from 'lodash';\nimport { ProcessingIntervalFunction } from '../processing';\nimport { rethrowError, timestampToDateTime } from './conversion';\nimport { initDatabaseMetrics } from './metrics';\nimport {\n DbRefreshKeysRow,\n DbRefreshStateReferencesRow,\n DbRefreshStateRow,\n DbRelationsRow,\n} from './tables';\nimport {\n GetProcessableEntitiesResult,\n ListParentsOptions,\n ListParentsResult,\n ProcessingDatabase,\n RefreshStateItem,\n Transaction,\n UpdateEntityCacheOptions,\n UpdateProcessedEntityOptions,\n} from './types';\nimport { checkLocationKeyConflict } from './operations/refreshState/checkLocationKeyConflict';\nimport { insertUnprocessedEntity } from './operations/refreshState/insertUnprocessedEntity';\nimport { updateUnprocessedEntity } from './operations/refreshState/updateUnprocessedEntity';\nimport { generateStableHash, generateTargetKey } from './util';\nimport {\n EventBroker,\n EventParams,\n EventsService,\n} from '@backstage/plugin-events-node';\nimport { DateTime } from 'luxon';\nimport { CATALOG_CONFLICTS_TOPIC } from '../constants';\nimport { CatalogConflictEventPayload } from '../catalog/types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n// The number of items that are sent per batch to the database layer, when\n// doing .batchInsert calls to knex. 
This needs to be low enough to not cause\n// errors in the underlying engine due to exceeding query limits, but large\n// enough to get the speed benefits.\nconst BATCH_SIZE = 50;\n\nexport class DefaultProcessingDatabase implements ProcessingDatabase {\n constructor(\n private readonly options: {\n database: Knex;\n logger: LoggerService;\n refreshInterval: ProcessingIntervalFunction;\n eventBroker?: EventBroker | EventsService;\n },\n ) {\n initDatabaseMetrics(options.database);\n }\n\n async updateProcessedEntity(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<{ previous: { relations: DbRelationsRow[] } }> {\n const tx = txOpaque as Knex.Transaction;\n const {\n id,\n processedEntity,\n resultHash,\n errors,\n relations,\n deferredEntities,\n refreshKeys,\n locationKey,\n } = options;\n const configClient = tx.client.config.client;\n const refreshResult = await tx<DbRefreshStateRow>('refresh_state')\n .update({\n processed_entity: JSON.stringify(processedEntity),\n result_hash: resultHash,\n errors,\n location_key: locationKey,\n })\n .where('entity_id', id)\n .andWhere(inner => {\n if (!locationKey) {\n return inner.whereNull('location_key');\n }\n return inner\n .where('location_key', locationKey)\n .orWhereNull('location_key');\n });\n if (refreshResult === 0) {\n throw new ConflictError(\n `Conflicting write of processing result for ${id} with location key '${locationKey}'`,\n );\n }\n const sourceEntityRef = stringifyEntityRef(processedEntity);\n\n // Schedule all deferred entities for future processing.\n await this.addUnprocessedEntities(tx, {\n entities: deferredEntities,\n sourceEntityRef,\n });\n\n // Delete old relations\n // NOTE(freben): knex implemented support for returning() on update queries for sqlite, but at the current time of writing (Sep 2022) not for delete() queries.\n let previousRelationRows: DbRelationsRow[];\n if (configClient.includes('sqlite3') || configClient.includes('mysql')) {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .select('*')\n .where({ originating_entity_id: id });\n await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete();\n } else {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete()\n .returning('*');\n }\n\n // Batch insert new relations\n const relationRows: DbRelationsRow[] = relations.map(\n ({ source, target, type }) => ({\n originating_entity_id: id,\n source_entity_ref: stringifyEntityRef(source),\n target_entity_ref: stringifyEntityRef(target),\n type,\n }),\n );\n\n await tx.batchInsert(\n 'relations',\n this.deduplicateRelations(relationRows),\n BATCH_SIZE,\n );\n\n // Delete old refresh keys\n await tx<DbRefreshKeysRow>('refresh_keys')\n .where({ entity_id: id })\n .delete();\n\n // Insert the refresh keys for the processed entity\n await tx.batchInsert(\n 'refresh_keys',\n refreshKeys.map(k => ({\n entity_id: id,\n key: generateTargetKey(k.key),\n })),\n BATCH_SIZE,\n );\n\n return {\n previous: {\n relations: previousRelationRows,\n },\n };\n }\n\n async updateProcessedEntityErrors(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, errors, resultHash } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({\n errors,\n result_hash: resultHash,\n })\n .where('entity_id', id);\n }\n\n async updateEntityCache(\n txOpaque: Transaction,\n options: UpdateEntityCacheOptions,\n ): 
Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, state } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({ cache: JSON.stringify(state ?? {}) })\n .where('entity_id', id);\n }\n\n async getProcessableEntities(\n maybeTx: Transaction | Knex,\n request: { processBatchSize: number },\n ): Promise<GetProcessableEntitiesResult> {\n const knex = maybeTx as Knex.Transaction | Knex;\n\n let itemsQuery = knex<DbRefreshStateRow>('refresh_state').select([\n 'entity_id',\n 'entity_ref',\n 'unprocessed_entity',\n 'result_hash',\n 'cache',\n 'errors',\n 'location_key',\n 'next_update_at',\n ]);\n\n // This avoids duplication of work because of race conditions and is\n // also fast because locked rows are ignored rather than blocking.\n // It's only available in MySQL and PostgreSQL\n if (['mysql', 'mysql2', 'pg'].includes(knex.client.config.client)) {\n itemsQuery = itemsQuery.forUpdate().skipLocked();\n }\n\n const items = await itemsQuery\n .where('next_update_at', '<=', knex.fn.now())\n .limit(request.processBatchSize)\n .orderBy('next_update_at', 'asc');\n\n const interval = this.options.refreshInterval();\n\n const nextUpdateAt = (refreshInterval: number) => {\n if (knex.client.config.client.includes('sqlite3')) {\n return knex.raw(`datetime('now', ?)`, [`${refreshInterval} seconds`]);\n } else if (knex.client.config.client.includes('mysql')) {\n return knex.raw(`now() + interval ${refreshInterval} second`);\n }\n return knex.raw(`now() + interval '${refreshInterval} seconds'`);\n };\n\n await knex<DbRefreshStateRow>('refresh_state')\n .whereIn(\n 'entity_ref',\n items.map(i => i.entity_ref),\n )\n .update({\n next_update_at: nextUpdateAt(interval),\n });\n\n return {\n items: items.map(\n i =>\n ({\n id: i.entity_id,\n entityRef: i.entity_ref,\n unprocessedEntity: JSON.parse(i.unprocessed_entity) as Entity,\n resultHash: i.result_hash || '',\n nextUpdateAt: timestampToDateTime(i.next_update_at),\n state: i.cache ? 
JSON.parse(i.cache) : undefined,\n errors: i.errors,\n locationKey: i.location_key,\n } satisfies RefreshStateItem),\n ),\n };\n }\n\n async listParents(\n txOpaque: Transaction,\n options: ListParentsOptions,\n ): Promise<ListParentsResult> {\n const tx = txOpaque as Knex.Transaction;\n\n const rows = await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n )\n .where({ target_entity_ref: options.entityRef })\n .select();\n\n const entityRefs = rows.map(r => r.source_entity_ref!).filter(Boolean);\n\n return { entityRefs };\n }\n\n async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {\n try {\n let result: T | undefined = undefined;\n\n await this.options.database.transaction(\n async tx => {\n // We can't return here, as knex swallows the return type in case the transaction is rolled back:\n // https://github.com/knex/knex/blob/e37aeaa31c8ef9c1b07d2e4d3ec6607e557d800d/lib/transaction.js#L136\n result = await fn(tx);\n },\n {\n // If we explicitly trigger a rollback, don't fail.\n doNotRejectOnRollback: true,\n },\n );\n\n return result!;\n } catch (e) {\n this.options.logger.debug(`Error during transaction, ${e}`);\n throw rethrowError(e);\n }\n }\n\n private deduplicateRelations(rows: DbRelationsRow[]): DbRelationsRow[] {\n return lodash.uniqBy(\n rows,\n r => `${r.source_entity_ref}:${r.target_entity_ref}:${r.type}`,\n );\n }\n\n /**\n * Add a set of deferred entities for processing.\n * The entities will be added at the front of the processing queue.\n */\n private async addUnprocessedEntities(\n txOpaque: Transaction,\n options: {\n sourceEntityRef: string;\n entities: DeferredEntity[];\n },\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n\n // Keeps track of the entities that we end up inserting to update refresh_state_references afterwards\n const stateReferences = new Array<string>();\n\n // Upsert all of the unprocessed entities into the refresh_state table, by\n // their entity ref.\n for (const { entity, locationKey } of options.entities) {\n const entityRef = stringifyEntityRef(entity);\n const hash = generateStableHash(entity);\n\n const updated = await updateUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n });\n if (updated) {\n stateReferences.push(entityRef);\n continue;\n }\n\n const inserted = await insertUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n logger: this.options.logger,\n });\n if (inserted) {\n stateReferences.push(entityRef);\n continue;\n }\n\n // If the row can't be inserted, we have a conflict, but it could be either\n // because of a conflicting locationKey or a race with another instance, so check\n // whether the conflicting entity has the same entityRef but a different locationKey\n const conflictingKey = await checkLocationKeyConflict({\n tx,\n entityRef,\n locationKey,\n });\n if (conflictingKey) {\n this.options.logger.warn(\n `Detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,\n );\n if (this.options.eventBroker && locationKey) {\n const eventParams: EventParams<CatalogConflictEventPayload> = {\n topic: CATALOG_CONFLICTS_TOPIC,\n eventPayload: {\n unprocessedEntity: entity,\n entityRef,\n newLocationKey: locationKey,\n existingLocationKey: conflictingKey,\n lastConflictAt: DateTime.now().toISO()!,\n },\n };\n await this.options.eventBroker?.publish(eventParams);\n }\n }\n }\n\n // Replace all references for the originating entity or source and then create new ones\n await 
tx<DbRefreshStateReferencesRow>('refresh_state_references')\n .andWhere({ source_entity_ref: options.sourceEntityRef })\n .delete();\n await tx.batchInsert(\n 'refresh_state_references',\n stateReferences.map(entityRef => ({\n source_entity_ref: options.sourceEntityRef,\n target_entity_ref: entityRef,\n })),\n BATCH_SIZE,\n );\n }\n}\n"],"names":["initDatabaseMetrics","errors","ConflictError","stringifyEntityRef","generateTargetKey","timestampToDateTime","rethrowError","lodash","generateStableHash","updateUnprocessedEntity","insertUnprocessedEntity","checkLocationKeyConflict","CATALOG_CONFLICTS_TOPIC","DateTime"],"mappings":";;;;;;;;;;;;;;;;;;AA0DA,MAAM,UAAa,GAAA,EAAA;AAEZ,MAAM,yBAAwD,CAAA;AAAA,EACnE,YACmB,OAMjB,EAAA;AANiB,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AAOjB,IAAAA,2BAAA,CAAoB,QAAQ,QAAQ,CAAA;AAAA;AACtC,EAEA,MAAM,qBACJ,CAAA,QAAA,EACA,OACwD,EAAA;AACxD,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA;AAAA,MACJ,EAAA;AAAA,MACA,eAAA;AAAA,MACA,UAAA;AAAA,cACAC,QAAA;AAAA,MACA,SAAA;AAAA,MACA,gBAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACE,GAAA,OAAA;AACJ,IAAM,MAAA,YAAA,GAAe,EAAG,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA;AACtC,IAAA,MAAM,aAAgB,GAAA,MAAM,EAAsB,CAAA,eAAe,EAC9D,MAAO,CAAA;AAAA,MACN,gBAAA,EAAkB,IAAK,CAAA,SAAA,CAAU,eAAe,CAAA;AAAA,MAChD,WAAa,EAAA,UAAA;AAAA,cACbA,QAAA;AAAA,MACA,YAAc,EAAA;AAAA,KACf,CACA,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA,CACrB,SAAS,CAAS,KAAA,KAAA;AACjB,MAAA,IAAI,CAAC,WAAa,EAAA;AAChB,QAAO,OAAA,KAAA,CAAM,UAAU,cAAc,CAAA;AAAA;AAEvC,MAAA,OAAO,MACJ,KAAM,CAAA,cAAA,EAAgB,WAAW,CAAA,CACjC,YAAY,cAAc,CAAA;AAAA,KAC9B,CAAA;AACH,IAAA,IAAI,kBAAkB,CAAG,EAAA;AACvB,MAAA,MAAM,IAAIC,oBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,EAAE,CAAA,oBAAA,EAAuB,WAAW,CAAA,CAAA;AAAA,OACpF;AAAA;AAEF,IAAM,MAAA,eAAA,GAAkBC,gCAAmB,eAAe,CAAA;AAG1D,IAAM,MAAA,IAAA,CAAK,uBAAuB,EAAI,EAAA;AAAA,MACpC,QAAU,EAAA,gBAAA;AAAA,MACV;AAAA,KACD,CAAA;AAID,IAAI,IAAA,oBAAA;AACJ,IAAA,IAAI,aAAa,QAAS,CAAA,SAAS,KAAK,YAAa,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtE,MAAuB,oBAAA,GAAA,MAAM,EAAmB,CAAA,WAAW,CACxD,CAAA,MAAA,CAAO,GAAG,CAAA,CACV,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA;AACtC,MAAM,MAAA,EAAA,CAAmB,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA;AAAA,KACL,MAAA;AACL,MAAA,oBAAA,GAAuB,MAAM,EAAA,CAAmB,WAAW,CAAA,CACxD,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA,CACP,UAAU,GAAG,CAAA;AAAA;AAIlB,IAAA,MAAM,eAAiC,SAAU,CAAA,GAAA;AAAA,MAC/C,CAAC,EAAE,MAAQ,EAAA,MAAA,EAAQ,MAAY,MAAA;AAAA,QAC7B,qBAAuB,EAAA,EAAA;AAAA,QACvB,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C;AAAA,OACF;AAAA,KACF;AAEA,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,WAAA;AAAA,MACA,IAAA,CAAK,qBAAqB,YAAY,CAAA;AAAA,MACtC;AAAA,KACF;AAGA,IAAM,MAAA,EAAA,CAAqB,cAAc,CACtC,CAAA,KAAA,CAAM,EAAE,SAAW,EAAA,EAAA,EAAI,CAAA,CACvB,MAAO,EAAA;AAGV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,cAAA;AAAA,MACA,WAAA,CAAY,IAAI,CAAM,CAAA,MAAA;AAAA,QACpB,SAAW,EAAA,EAAA;AAAA,QACX,GAAA,EAAKC,sBAAkB,CAAA,CAAA,CAAE,GAAG;AAAA,OAC5B,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAEA,IAAO,OAAA;AAAA,MACL,QAAU,EAAA;AAAA,QACR,SAAW,EAAA;AAAA;AACb,KACF;AAAA;AACF,EAEA,MAAM,2BACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAA,MAAM,EAAE,EAAA,EAAI,MAAQ,EAAA,UAAA,EAAe,GAAA,OAAA;AAEnC,IAAM,MAAA,EAAA,CAAsB,eAAe,CAAA,CACxC,MAAO,CAAA;AAAA,MACN,MAAA;AAAA,MACA,WAAa,EAAA;AAAA,KACd,CAAA,CACA,KAAM,CAAA,WAAA,EAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,iBACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA,EAAE,EAAI,EAAA,KAAA,EAAU,GAAA,OAAA;AAEtB,IAAA,MAAM,GAAsB,eAAe,CAAA,CACxC,MAAO,CAAA,EAAE,OAAO,IAAK,CAAA,SAAA,CAAU,KAAS,IAAA,EAAE,CAAE,EAAC,CAC7C,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,sBACJ,CAAA,OAAA,EACA,
OACuC,EAAA;AACvC,IAAA,MAAM,IAAO,GAAA,OAAA;AAEb,IAAA,IAAI,UAAa,GAAA,IAAA,CAAwB,eAAe,CAAA,CAAE,MAAO,CAAA;AAAA,MAC/D,WAAA;AAAA,MACA,YAAA;AAAA,MACA,oBAAA;AAAA,MACA,aAAA;AAAA,MACA,OAAA;AAAA,MACA,QAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACD,CAAA;AAKD,IAAI,IAAA,CAAC,OAAS,EAAA,QAAA,EAAU,IAAI,CAAA,CAAE,SAAS,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAM,CAAG,EAAA;AACjE,MAAa,UAAA,GAAA,UAAA,CAAW,SAAU,EAAA,CAAE,UAAW,EAAA;AAAA;AAGjD,IAAA,MAAM,QAAQ,MAAM,UAAA,CACjB,KAAM,CAAA,gBAAA,EAAkB,MAAM,IAAK,CAAA,EAAA,CAAG,GAAI,EAAC,EAC3C,KAAM,CAAA,OAAA,CAAQ,gBAAgB,CAC9B,CAAA,OAAA,CAAQ,kBAAkB,KAAK,CAAA;AAElC,IAAM,MAAA,QAAA,GAAW,IAAK,CAAA,OAAA,CAAQ,eAAgB,EAAA;AAE9C,IAAM,MAAA,YAAA,GAAe,CAAC,eAA4B,KAAA;AAChD,MAAA,IAAI,KAAK,MAAO,CAAA,MAAA,CAAO,MAAO,CAAA,QAAA,CAAS,SAAS,CAAG,EAAA;AACjD,QAAA,OAAO,KAAK,GAAI,CAAA,CAAA,kBAAA,CAAA,EAAsB,CAAC,CAAG,EAAA,eAAe,UAAU,CAAC,CAAA;AAAA,iBAC3D,IAAK,CAAA,MAAA,CAAO,OAAO,MAAO,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtD,QAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAoB,iBAAA,EAAA,eAAe,CAAS,OAAA,CAAA,CAAA;AAAA;AAE9D,MAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAqB,kBAAA,EAAA,eAAe,CAAW,SAAA,CAAA,CAAA;AAAA,KACjE;AAEA,IAAM,MAAA,IAAA,CAAwB,eAAe,CAC1C,CAAA,OAAA;AAAA,MACC,YAAA;AAAA,MACA,KAAM,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,UAAU;AAAA,MAE5B,MAAO,CAAA;AAAA,MACN,cAAA,EAAgB,aAAa,QAAQ;AAAA,KACtC,CAAA;AAEH,IAAO,OAAA;AAAA,MACL,OAAO,KAAM,CAAA,GAAA;AAAA,QACX,CACG,CAAA,MAAA;AAAA,UACC,IAAI,CAAE,CAAA,SAAA;AAAA,UACN,WAAW,CAAE,CAAA,UAAA;AAAA,UACb,iBAAmB,EAAA,IAAA,CAAK,KAAM,CAAA,CAAA,CAAE,kBAAkB,CAAA;AAAA,UAClD,UAAA,EAAY,EAAE,WAAe,IAAA,EAAA;AAAA,UAC7B,YAAA,EAAcC,8BAAoB,CAAA,CAAA,CAAE,cAAc,CAAA;AAAA,UAClD,OAAO,CAAE,CAAA,KAAA,GAAQ,KAAK,KAAM,CAAA,CAAA,CAAE,KAAK,CAAI,GAAA,KAAA,CAAA;AAAA,UACvC,QAAQ,CAAE,CAAA,MAAA;AAAA,UACV,aAAa,CAAE,CAAA;AAAA,SACjB;AAAA;AACJ,KACF;AAAA;AACF,EAEA,MAAM,WACJ,CAAA,QAAA,EACA,OAC4B,EAAA;AAC5B,IAAA,MAAM,EAAK,GAAA,QAAA;AAEX,IAAA,MAAM,OAAO,MAAM,EAAA;AAAA,MACjB;AAAA,KACF,CACG,MAAM,EAAE,iBAAA,EAAmB,QAAQ,SAAU,EAAC,EAC9C,MAAO,EAAA;AAEV,IAAM,MAAA,UAAA,GAAa,KAAK,GAAI,CAAA,CAAA,CAAA,KAAK,EAAE,iBAAkB,CAAA,CAAE,OAAO,OAAO,CAAA;AAErE,IAAA,OAAO,EAAE,UAAW,EAAA;AAAA;AACtB,EAEA,MAAM,YAAe,EAAiD,EAAA;AACpE,IAAI,IAAA;AACF,MAAA,IAAI,MAAwB,GAAA,KAAA,CAAA;AAE5B,MAAM,MAAA,IAAA,CAAK,QAAQ,QAAS,CAAA,WAAA;AAAA,QAC1B,OAAM,EAAM,KAAA;AAGV,UAAS,MAAA,GAAA,MAAM,GAAG,EAAE,CAAA;AAAA,SACtB;AAAA,QACA;AAAA;AAAA,UAEE,qBAAuB,EAAA;AAAA;AACzB,OACF;AAEA,MAAO,OAAA,MAAA;AAAA,aACA,CAAG,EAAA;AACV,MAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,CAAO,KAAM,CAAA,CAAA,0BAAA,EAA6B,CAAC,CAAE,CAAA,CAAA;AAC1D,MAAA,MAAMC,wBAAa,CAAC,CAAA;AAAA;AACtB;AACF,EAEQ,qBAAqB,IAA0C,EAAA;AACrE,IAAA,OAAOC,uBAAO,CAAA,MAAA;AAAA,MACZ,IAAA;AAAA,MACA,CAAA,CAAA,KAAK,GAAG,CAAE,CAAA,iBAAiB,IAAI,CAAE,CAAA,iBAAiB,CAAI,CAAA,EAAA,CAAA,CAAE,IAAI,CAAA;AAAA,KAC9D;AAAA;AACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBACZ,CAAA,QAAA,EACA,OAIe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AAGX,IAAM,MAAA,eAAA,GAAkB,IAAI,KAAc,EAAA;AAI1C,IAAA,KAAA,MAAW,EAAE,MAAA,EAAQ,WAAY,EAAA,IAAK,QAAQ,QAAU,EAAA;AACtD,MAAM,MAAA,SAAA,GAAYJ,gCAAmB,MAAM,CAAA;AAC3C,MAAM,MAAA,IAAA,GAAOK,wBAAmB,MAAM,CAAA;AAEtC,MAAM,MAAA,OAAA,GAAU,MAAMC,+CAAwB,CAAA;AAAA,QAC5C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,OAAS,EAAA;AACX,QAAA,eAAA,CAAgB,KAAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAGF,MAAM,MAAA,QAAA,GAAW,MAAMC,+CAAwB,CAAA;AAAA,QAC7C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA,EAAQ,KAAK,OAAQ,CAAA;AAAA,OACtB,CAAA;AACD,MAAA,IAAI,QAAU,EAAA;AACZ,QAAA,eAAA,CAAgB,KAAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAMF,MAAM,MAAA,cAAA,GAAiB,MAAMC,iDAAyB,CAAA;AAAA,QACpD,EAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,cAAgB,EAAA;AAClB,QAAA,IAAA,CAAK,QAAQ,MAAO,CAAA,IAAA;AAAA,UA
ClB,CAAkC,+BAAA,EAAA,SAAS,CAA0B,uBAAA,EAAA,cAAc,iBAAiB,WAAW,CAAA;AAAA,SACjH;AACA,QAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,WAAA,IAAe,WAAa,EAAA;AAC3C,UAAA,MAAM,WAAwD,GAAA;AAAA,YAC5D,KAAO,EAAAC,iCAAA;AAAA,YACP,YAAc,EAAA;AAAA,cACZ,iBAAmB,EAAA,MAAA;AAAA,cACnB,SAAA;AAAA,cACA,cAAgB,EAAA,WAAA;AAAA,cAChB,mBAAqB,EAAA,cAAA;AAAA,cACrB,cAAgB,EAAAC,cAAA,CAAS,GAAI,EAAA,CAAE,KAAM;AAAA;AACvC,WACF;AACA,UAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,WAAa,EAAA,OAAA,CAAQ,WAAW,CAAA;AAAA;AACrD;AACF;AAIF,IAAM,MAAA,EAAA,CAAgC,0BAA0B,CAAA,CAC7D,QAAS,CAAA,EAAE,mBAAmB,OAAQ,CAAA,eAAA,EAAiB,CAAA,CACvD,MAAO,EAAA;AACV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,0BAAA;AAAA,MACA,eAAA,CAAgB,IAAI,CAAc,SAAA,MAAA;AAAA,QAChC,mBAAmB,OAAQ,CAAA,eAAA;AAAA,QAC3B,iBAAmB,EAAA;AAAA,OACnB,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAAA;AAEJ;;;;"}
+ {"version":3,"file":"DefaultProcessingDatabase.cjs.js","sources":["../../src/database/DefaultProcessingDatabase.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { ConflictError } from '@backstage/errors';\nimport { DeferredEntity } from '@backstage/plugin-catalog-node';\nimport { Knex } from 'knex';\nimport lodash from 'lodash';\nimport { ProcessingIntervalFunction } from '../processing';\nimport { rethrowError, timestampToDateTime } from './conversion';\nimport { initDatabaseMetrics } from './metrics';\nimport {\n DbRefreshKeysRow,\n DbRefreshStateReferencesRow,\n DbRefreshStateRow,\n DbRelationsRow,\n} from './tables';\nimport {\n GetProcessableEntitiesResult,\n ListParentsOptions,\n ListParentsResult,\n ProcessingDatabase,\n RefreshStateItem,\n Transaction,\n UpdateEntityCacheOptions,\n UpdateProcessedEntityOptions,\n} from './types';\nimport { checkLocationKeyConflict } from './operations/refreshState/checkLocationKeyConflict';\nimport { insertUnprocessedEntity } from './operations/refreshState/insertUnprocessedEntity';\nimport { updateUnprocessedEntity } from './operations/refreshState/updateUnprocessedEntity';\nimport { generateStableHash, generateTargetKey } from './util';\nimport {\n EventBroker,\n EventParams,\n EventsService,\n} from '@backstage/plugin-events-node';\nimport { DateTime } from 'luxon';\nimport { CATALOG_CONFLICTS_TOPIC } from '../constants';\nimport { CatalogConflictEventPayload } from '../catalog/types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n// The number of items that are sent per batch to the database layer, when\n// doing .batchInsert calls to knex. 
This needs to be low enough to not cause\n// errors in the underlying engine due to exceeding query limits, but large\n// enough to get the speed benefits.\nconst BATCH_SIZE = 50;\n\nexport class DefaultProcessingDatabase implements ProcessingDatabase {\n constructor(\n private readonly options: {\n database: Knex;\n logger: LoggerService;\n refreshInterval: ProcessingIntervalFunction;\n eventBroker?: EventBroker | EventsService;\n },\n ) {\n initDatabaseMetrics(options.database);\n }\n\n async updateProcessedEntity(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<{ previous: { relations: DbRelationsRow[] } }> {\n const tx = txOpaque as Knex.Transaction;\n const {\n id,\n processedEntity,\n resultHash,\n errors,\n relations,\n deferredEntities,\n refreshKeys,\n locationKey,\n } = options;\n const configClient = tx.client.config.client;\n const refreshResult = await tx<DbRefreshStateRow>('refresh_state')\n .update({\n processed_entity: JSON.stringify(processedEntity),\n result_hash: resultHash,\n errors,\n location_key: locationKey,\n })\n .where('entity_id', id)\n .andWhere(inner => {\n if (!locationKey) {\n return inner.whereNull('location_key');\n }\n return inner\n .where('location_key', locationKey)\n .orWhereNull('location_key');\n });\n if (refreshResult === 0) {\n throw new ConflictError(\n `Conflicting write of processing result for ${id} with location key '${locationKey}'`,\n );\n }\n const sourceEntityRef = stringifyEntityRef(processedEntity);\n\n // Schedule all deferred entities for future processing.\n await this.addUnprocessedEntities(tx, {\n entities: deferredEntities,\n sourceEntityRef,\n });\n\n // Delete old relations\n // NOTE(freben): knex implemented support for returning() on update queries for sqlite, but at the current time of writing (Sep 2022) not for delete() queries.\n let previousRelationRows: DbRelationsRow[];\n if (configClient.includes('sqlite3') || configClient.includes('mysql')) {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .select('*')\n .where({ originating_entity_id: id });\n await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete();\n } else {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete()\n .returning('*');\n }\n\n // Batch insert new relations\n const relationRows: DbRelationsRow[] = relations.map(\n ({ source, target, type }) => ({\n originating_entity_id: id,\n source_entity_ref: stringifyEntityRef(source),\n target_entity_ref: stringifyEntityRef(target),\n type,\n }),\n );\n\n await tx.batchInsert(\n 'relations',\n this.deduplicateRelations(relationRows),\n BATCH_SIZE,\n );\n\n // Delete old refresh keys\n await tx<DbRefreshKeysRow>('refresh_keys')\n .where({ entity_id: id })\n .delete();\n\n // Insert the refresh keys for the processed entity\n await tx.batchInsert(\n 'refresh_keys',\n refreshKeys.map(k => ({\n entity_id: id,\n key: generateTargetKey(k.key),\n })),\n BATCH_SIZE,\n );\n\n return {\n previous: {\n relations: previousRelationRows,\n },\n };\n }\n\n async updateProcessedEntityErrors(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, errors, resultHash } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({\n errors,\n result_hash: resultHash,\n })\n .where('entity_id', id);\n }\n\n async updateEntityCache(\n txOpaque: Transaction,\n options: UpdateEntityCacheOptions,\n ): 
Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, state } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({ cache: JSON.stringify(state ?? {}) })\n .where('entity_id', id);\n }\n\n async getProcessableEntities(\n maybeTx: Transaction | Knex,\n request: { processBatchSize: number },\n ): Promise<GetProcessableEntitiesResult> {\n const knex = maybeTx as Knex.Transaction | Knex;\n\n let itemsQuery = knex<DbRefreshStateRow>('refresh_state').select([\n 'entity_id',\n 'entity_ref',\n 'unprocessed_entity',\n 'result_hash',\n 'cache',\n 'errors',\n 'location_key',\n 'next_update_at',\n ]);\n\n // This avoids duplication of work because of race conditions and is\n // also fast because locked rows are ignored rather than blocking.\n // It's only available in MySQL and PostgreSQL\n if (['mysql', 'mysql2', 'pg'].includes(knex.client.config.client)) {\n itemsQuery = itemsQuery.forUpdate().skipLocked();\n }\n\n const items = await itemsQuery\n .where('next_update_at', '<=', knex.fn.now())\n .limit(request.processBatchSize)\n .orderBy('next_update_at', 'asc');\n\n const interval = this.options.refreshInterval();\n\n const nextUpdateAt = (refreshInterval: number) => {\n if (knex.client.config.client.includes('sqlite3')) {\n return knex.raw(`datetime('now', ?)`, [`${refreshInterval} seconds`]);\n } else if (knex.client.config.client.includes('mysql')) {\n return knex.raw(`now() + interval ${refreshInterval} second`);\n }\n return knex.raw(`now() + interval '${refreshInterval} seconds'`);\n };\n\n await knex<DbRefreshStateRow>('refresh_state')\n .whereIn(\n 'entity_ref',\n items.map(i => i.entity_ref),\n )\n .update({\n next_update_at: nextUpdateAt(interval),\n });\n\n return {\n items: items.map(\n i =>\n ({\n id: i.entity_id,\n entityRef: i.entity_ref,\n unprocessedEntity: JSON.parse(i.unprocessed_entity) as Entity,\n resultHash: i.result_hash || '',\n nextUpdateAt: timestampToDateTime(i.next_update_at),\n state: i.cache ? 
JSON.parse(i.cache) : undefined,\n errors: i.errors,\n locationKey: i.location_key,\n } satisfies RefreshStateItem),\n ),\n };\n }\n\n async listParents(\n txOpaque: Transaction,\n options: ListParentsOptions,\n ): Promise<ListParentsResult> {\n const tx = txOpaque as Knex.Transaction;\n\n const rows = await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n )\n .where({ target_entity_ref: options.entityRef })\n .select();\n\n const entityRefs = rows.map(r => r.source_entity_ref!).filter(Boolean);\n\n return { entityRefs };\n }\n\n async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {\n try {\n let result: T | undefined = undefined;\n\n await this.options.database.transaction(\n async tx => {\n // We can't return here, as knex swallows the return type in case the transaction is rolled back:\n // https://github.com/knex/knex/blob/e37aeaa31c8ef9c1b07d2e4d3ec6607e557d800d/lib/transaction.js#L136\n result = await fn(tx);\n },\n {\n // If we explicitly trigger a rollback, don't fail.\n doNotRejectOnRollback: true,\n },\n );\n\n return result!;\n } catch (e) {\n this.options.logger.debug(`Error during transaction, ${e}`);\n throw rethrowError(e);\n }\n }\n\n private deduplicateRelations(rows: DbRelationsRow[]): DbRelationsRow[] {\n return lodash.uniqBy(\n rows,\n r => `${r.source_entity_ref}:${r.target_entity_ref}:${r.type}`,\n );\n }\n\n /**\n * Add a set of deferred entities for processing.\n * The entities will be added at the front of the processing queue.\n */\n private async addUnprocessedEntities(\n txOpaque: Transaction,\n options: {\n sourceEntityRef: string;\n entities: DeferredEntity[];\n },\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n\n // Keeps track of the entities that we end up inserting to update refresh_state_references afterwards\n const stateReferences = new Array<string>();\n\n // Upsert all of the unprocessed entities into the refresh_state table, by\n // their entity ref.\n for (const { entity, locationKey } of options.entities) {\n const entityRef = stringifyEntityRef(entity);\n const hash = generateStableHash(entity);\n\n const updated = await updateUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n });\n if (updated) {\n stateReferences.push(entityRef);\n continue;\n }\n\n const inserted = await insertUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n logger: this.options.logger,\n });\n if (inserted) {\n stateReferences.push(entityRef);\n continue;\n }\n\n // If the row can't be inserted, we have a conflict, but it could be either\n // because of a conflicting locationKey or a race with another instance, so check\n // whether the conflicting entity has the same entityRef but a different locationKey\n const conflictingKey = await checkLocationKeyConflict({\n tx,\n entityRef,\n locationKey,\n });\n if (conflictingKey) {\n this.options.logger.warn(\n `Detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,\n );\n if (this.options.eventBroker && locationKey) {\n const eventParams: EventParams<CatalogConflictEventPayload> = {\n topic: CATALOG_CONFLICTS_TOPIC,\n eventPayload: {\n unprocessedEntity: entity,\n entityRef,\n newLocationKey: locationKey,\n existingLocationKey: conflictingKey,\n lastConflictAt: DateTime.now().toISO()!,\n },\n };\n await this.options.eventBroker?.publish(eventParams);\n }\n }\n }\n\n // Lastly, replace refresh state references for the originating entity and any successfully added entities\n await 
tx<DbRefreshStateReferencesRow>('refresh_state_references')\n // Remove all existing references from the originating entity\n .where({ source_entity_ref: options.sourceEntityRef })\n // And remove any existing references to entities that we're inserting new references for\n .orWhereIn('target_entity_ref', stateReferences)\n .delete();\n await tx.batchInsert(\n 'refresh_state_references',\n stateReferences.map(entityRef => ({\n source_entity_ref: options.sourceEntityRef,\n target_entity_ref: entityRef,\n })),\n BATCH_SIZE,\n );\n }\n}\n"],"names":["initDatabaseMetrics","errors","ConflictError","stringifyEntityRef","generateTargetKey","timestampToDateTime","rethrowError","lodash","generateStableHash","updateUnprocessedEntity","insertUnprocessedEntity","checkLocationKeyConflict","CATALOG_CONFLICTS_TOPIC","DateTime"],"mappings":";;;;;;;;;;;;;;;;;;AA0DA,MAAM,UAAa,GAAA,EAAA;AAEZ,MAAM,yBAAwD,CAAA;AAAA,EACnE,YACmB,OAMjB,EAAA;AANiB,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AAOjB,IAAAA,2BAAA,CAAoB,QAAQ,QAAQ,CAAA;AAAA;AACtC,EAEA,MAAM,qBACJ,CAAA,QAAA,EACA,OACwD,EAAA;AACxD,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA;AAAA,MACJ,EAAA;AAAA,MACA,eAAA;AAAA,MACA,UAAA;AAAA,cACAC,QAAA;AAAA,MACA,SAAA;AAAA,MACA,gBAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACE,GAAA,OAAA;AACJ,IAAM,MAAA,YAAA,GAAe,EAAG,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA;AACtC,IAAA,MAAM,aAAgB,GAAA,MAAM,EAAsB,CAAA,eAAe,EAC9D,MAAO,CAAA;AAAA,MACN,gBAAA,EAAkB,IAAK,CAAA,SAAA,CAAU,eAAe,CAAA;AAAA,MAChD,WAAa,EAAA,UAAA;AAAA,cACbA,QAAA;AAAA,MACA,YAAc,EAAA;AAAA,KACf,CACA,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA,CACrB,SAAS,CAAS,KAAA,KAAA;AACjB,MAAA,IAAI,CAAC,WAAa,EAAA;AAChB,QAAO,OAAA,KAAA,CAAM,UAAU,cAAc,CAAA;AAAA;AAEvC,MAAA,OAAO,MACJ,KAAM,CAAA,cAAA,EAAgB,WAAW,CAAA,CACjC,YAAY,cAAc,CAAA;AAAA,KAC9B,CAAA;AACH,IAAA,IAAI,kBAAkB,CAAG,EAAA;AACvB,MAAA,MAAM,IAAIC,oBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,EAAE,CAAA,oBAAA,EAAuB,WAAW,CAAA,CAAA;AAAA,OACpF;AAAA;AAEF,IAAM,MAAA,eAAA,GAAkBC,gCAAmB,eAAe,CAAA;AAG1D,IAAM,MAAA,IAAA,CAAK,uBAAuB,EAAI,EAAA;AAAA,MACpC,QAAU,EAAA,gBAAA;AAAA,MACV;AAAA,KACD,CAAA;AAID,IAAI,IAAA,oBAAA;AACJ,IAAA,IAAI,aAAa,QAAS,CAAA,SAAS,KAAK,YAAa,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtE,MAAuB,oBAAA,GAAA,MAAM,EAAmB,CAAA,WAAW,CACxD,CAAA,MAAA,CAAO,GAAG,CAAA,CACV,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA;AACtC,MAAM,MAAA,EAAA,CAAmB,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA;AAAA,KACL,MAAA;AACL,MAAA,oBAAA,GAAuB,MAAM,EAAA,CAAmB,WAAW,CAAA,CACxD,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA,CACP,UAAU,GAAG,CAAA;AAAA;AAIlB,IAAA,MAAM,eAAiC,SAAU,CAAA,GAAA;AAAA,MAC/C,CAAC,EAAE,MAAQ,EAAA,MAAA,EAAQ,MAAY,MAAA;AAAA,QAC7B,qBAAuB,EAAA,EAAA;AAAA,QACvB,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C;AAAA,OACF;AAAA,KACF;AAEA,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,WAAA;AAAA,MACA,IAAA,CAAK,qBAAqB,YAAY,CAAA;AAAA,MACtC;AAAA,KACF;AAGA,IAAM,MAAA,EAAA,CAAqB,cAAc,CACtC,CAAA,KAAA,CAAM,EAAE,SAAW,EAAA,EAAA,EAAI,CAAA,CACvB,MAAO,EAAA;AAGV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,cAAA;AAAA,MACA,WAAA,CAAY,IAAI,CAAM,CAAA,MAAA;AAAA,QACpB,SAAW,EAAA,EAAA;AAAA,QACX,GAAA,EAAKC,sBAAkB,CAAA,CAAA,CAAE,GAAG;AAAA,OAC5B,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAEA,IAAO,OAAA;AAAA,MACL,QAAU,EAAA;AAAA,QACR,SAAW,EAAA;AAAA;AACb,KACF;AAAA;AACF,EAEA,MAAM,2BACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAA,MAAM,EAAE,EAAA,EAAI,MAAQ,EAAA,UAAA,EAAe,GAAA,OAAA;AAEnC,IAAM,MAAA,EAAA,CAAsB,eAAe,CAAA,CACxC,MAAO,CAAA;AAAA,MACN,MAAA;AAAA,MACA,WAAa,EAAA;AAAA,KACd,CAAA,CACA,KAAM,CAAA,WAAA,EAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,iBACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA,EAAE,EAAI,EA
AA,KAAA,EAAU,GAAA,OAAA;AAEtB,IAAA,MAAM,GAAsB,eAAe,CAAA,CACxC,MAAO,CAAA,EAAE,OAAO,IAAK,CAAA,SAAA,CAAU,KAAS,IAAA,EAAE,CAAE,EAAC,CAC7C,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,sBACJ,CAAA,OAAA,EACA,OACuC,EAAA;AACvC,IAAA,MAAM,IAAO,GAAA,OAAA;AAEb,IAAA,IAAI,UAAa,GAAA,IAAA,CAAwB,eAAe,CAAA,CAAE,MAAO,CAAA;AAAA,MAC/D,WAAA;AAAA,MACA,YAAA;AAAA,MACA,oBAAA;AAAA,MACA,aAAA;AAAA,MACA,OAAA;AAAA,MACA,QAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACD,CAAA;AAKD,IAAI,IAAA,CAAC,OAAS,EAAA,QAAA,EAAU,IAAI,CAAA,CAAE,SAAS,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAM,CAAG,EAAA;AACjE,MAAa,UAAA,GAAA,UAAA,CAAW,SAAU,EAAA,CAAE,UAAW,EAAA;AAAA;AAGjD,IAAA,MAAM,QAAQ,MAAM,UAAA,CACjB,KAAM,CAAA,gBAAA,EAAkB,MAAM,IAAK,CAAA,EAAA,CAAG,GAAI,EAAC,EAC3C,KAAM,CAAA,OAAA,CAAQ,gBAAgB,CAC9B,CAAA,OAAA,CAAQ,kBAAkB,KAAK,CAAA;AAElC,IAAM,MAAA,QAAA,GAAW,IAAK,CAAA,OAAA,CAAQ,eAAgB,EAAA;AAE9C,IAAM,MAAA,YAAA,GAAe,CAAC,eAA4B,KAAA;AAChD,MAAA,IAAI,KAAK,MAAO,CAAA,MAAA,CAAO,MAAO,CAAA,QAAA,CAAS,SAAS,CAAG,EAAA;AACjD,QAAA,OAAO,KAAK,GAAI,CAAA,CAAA,kBAAA,CAAA,EAAsB,CAAC,CAAG,EAAA,eAAe,UAAU,CAAC,CAAA;AAAA,iBAC3D,IAAK,CAAA,MAAA,CAAO,OAAO,MAAO,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtD,QAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAoB,iBAAA,EAAA,eAAe,CAAS,OAAA,CAAA,CAAA;AAAA;AAE9D,MAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAqB,kBAAA,EAAA,eAAe,CAAW,SAAA,CAAA,CAAA;AAAA,KACjE;AAEA,IAAM,MAAA,IAAA,CAAwB,eAAe,CAC1C,CAAA,OAAA;AAAA,MACC,YAAA;AAAA,MACA,KAAM,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,UAAU;AAAA,MAE5B,MAAO,CAAA;AAAA,MACN,cAAA,EAAgB,aAAa,QAAQ;AAAA,KACtC,CAAA;AAEH,IAAO,OAAA;AAAA,MACL,OAAO,KAAM,CAAA,GAAA;AAAA,QACX,CACG,CAAA,MAAA;AAAA,UACC,IAAI,CAAE,CAAA,SAAA;AAAA,UACN,WAAW,CAAE,CAAA,UAAA;AAAA,UACb,iBAAmB,EAAA,IAAA,CAAK,KAAM,CAAA,CAAA,CAAE,kBAAkB,CAAA;AAAA,UAClD,UAAA,EAAY,EAAE,WAAe,IAAA,EAAA;AAAA,UAC7B,YAAA,EAAcC,8BAAoB,CAAA,CAAA,CAAE,cAAc,CAAA;AAAA,UAClD,OAAO,CAAE,CAAA,KAAA,GAAQ,KAAK,KAAM,CAAA,CAAA,CAAE,KAAK,CAAI,GAAA,KAAA,CAAA;AAAA,UACvC,QAAQ,CAAE,CAAA,MAAA;AAAA,UACV,aAAa,CAAE,CAAA;AAAA,SACjB;AAAA;AACJ,KACF;AAAA;AACF,EAEA,MAAM,WACJ,CAAA,QAAA,EACA,OAC4B,EAAA;AAC5B,IAAA,MAAM,EAAK,GAAA,QAAA;AAEX,IAAA,MAAM,OAAO,MAAM,EAAA;AAAA,MACjB;AAAA,KACF,CACG,MAAM,EAAE,iBAAA,EAAmB,QAAQ,SAAU,EAAC,EAC9C,MAAO,EAAA;AAEV,IAAM,MAAA,UAAA,GAAa,KAAK,GAAI,CAAA,CAAA,CAAA,KAAK,EAAE,iBAAkB,CAAA,CAAE,OAAO,OAAO,CAAA;AAErE,IAAA,OAAO,EAAE,UAAW,EAAA;AAAA;AACtB,EAEA,MAAM,YAAe,EAAiD,EAAA;AACpE,IAAI,IAAA;AACF,MAAA,IAAI,MAAwB,GAAA,KAAA,CAAA;AAE5B,MAAM,MAAA,IAAA,CAAK,QAAQ,QAAS,CAAA,WAAA;AAAA,QAC1B,OAAM,EAAM,KAAA;AAGV,UAAS,MAAA,GAAA,MAAM,GAAG,EAAE,CAAA;AAAA,SACtB;AAAA,QACA;AAAA;AAAA,UAEE,qBAAuB,EAAA;AAAA;AACzB,OACF;AAEA,MAAO,OAAA,MAAA;AAAA,aACA,CAAG,EAAA;AACV,MAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,CAAO,KAAM,CAAA,CAAA,0BAAA,EAA6B,CAAC,CAAE,CAAA,CAAA;AAC1D,MAAA,MAAMC,wBAAa,CAAC,CAAA;AAAA;AACtB;AACF,EAEQ,qBAAqB,IAA0C,EAAA;AACrE,IAAA,OAAOC,uBAAO,CAAA,MAAA;AAAA,MACZ,IAAA;AAAA,MACA,CAAA,CAAA,KAAK,GAAG,CAAE,CAAA,iBAAiB,IAAI,CAAE,CAAA,iBAAiB,CAAI,CAAA,EAAA,CAAA,CAAE,IAAI,CAAA;AAAA,KAC9D;AAAA;AACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBACZ,CAAA,QAAA,EACA,OAIe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AAGX,IAAM,MAAA,eAAA,GAAkB,IAAI,KAAc,EAAA;AAI1C,IAAA,KAAA,MAAW,EAAE,MAAA,EAAQ,WAAY,EAAA,IAAK,QAAQ,QAAU,EAAA;AACtD,MAAM,MAAA,SAAA,GAAYJ,gCAAmB,MAAM,CAAA;AAC3C,MAAM,MAAA,IAAA,GAAOK,wBAAmB,MAAM,CAAA;AAEtC,MAAM,MAAA,OAAA,GAAU,MAAMC,+CAAwB,CAAA;AAAA,QAC5C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,OAAS,EAAA;AACX,QAAA,eAAA,CAAgB,KAAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAGF,MAAM,MAAA,QAAA,GAAW,MAAMC,+CAAwB,CAAA;AAAA,QAC7C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA,EAAQ,KAAK,OAAQ,CAAA;AAAA,OACtB,CAAA;AACD,MAAA,IAAI,QAAU,EAAA;AACZ,QAAA,eAAA,CAAgB,K
AAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAMF,MAAM,MAAA,cAAA,GAAiB,MAAMC,iDAAyB,CAAA;AAAA,QACpD,EAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,cAAgB,EAAA;AAClB,QAAA,IAAA,CAAK,QAAQ,MAAO,CAAA,IAAA;AAAA,UAClB,CAAkC,+BAAA,EAAA,SAAS,CAA0B,uBAAA,EAAA,cAAc,iBAAiB,WAAW,CAAA;AAAA,SACjH;AACA,QAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,WAAA,IAAe,WAAa,EAAA;AAC3C,UAAA,MAAM,WAAwD,GAAA;AAAA,YAC5D,KAAO,EAAAC,iCAAA;AAAA,YACP,YAAc,EAAA;AAAA,cACZ,iBAAmB,EAAA,MAAA;AAAA,cACnB,SAAA;AAAA,cACA,cAAgB,EAAA,WAAA;AAAA,cAChB,mBAAqB,EAAA,cAAA;AAAA,cACrB,cAAgB,EAAAC,cAAA,CAAS,GAAI,EAAA,CAAE,KAAM;AAAA;AACvC,WACF;AACA,UAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,WAAa,EAAA,OAAA,CAAQ,WAAW,CAAA;AAAA;AACrD;AACF;AAIF,IAAA,MAAM,EAAgC,CAAA,0BAA0B,CAE7D,CAAA,KAAA,CAAM,EAAE,iBAAmB,EAAA,OAAA,CAAQ,eAAgB,EAAC,CAEpD,CAAA,SAAA,CAAU,mBAAqB,EAAA,eAAe,EAC9C,MAAO,EAAA;AACV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,0BAAA;AAAA,MACA,eAAA,CAAgB,IAAI,CAAc,SAAA,MAAA;AAAA,QAChC,mBAAmB,OAAQ,CAAA,eAAA;AAAA,QAC3B,iBAAmB,EAAA;AAAA,OACnB,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAAA;AAEJ;;;;"}
@@ -111,8 +111,8 @@ class DefaultProviderDatabase {
 logger: this.options.logger
 });
 }
- await tx("refresh_state_references").where("target_entity_ref", entityRef).andWhere({ source_key: options.sourceKey }).delete();
 if (ok) {
+ await tx("refresh_state_references").where("target_entity_ref", entityRef).delete();
 await tx(
 "refresh_state_references"
 ).insert({
@@ -120,6 +120,7 @@ class DefaultProviderDatabase {
 target_entity_ref: entityRef
 });
 } else {
+ await tx("refresh_state_references").where("target_entity_ref", entityRef).andWhere({ source_key: options.sourceKey }).delete();
 const conflictingKey = await checkLocationKeyConflict.checkLocationKeyConflict({
 tx,
 entityRef,
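This is the provider-side half of the same cleanup. The reference deletion, previously unconditional, is now split: on a successful upsert the provider removes every reference to the entity regardless of `source_key` before inserting its own, taking over sole ownership, while on failure it backs out only its own reference. A sketch of the two predicates, using the same offline knex setup as above and illustrative values:

```ts
import { knex as knexFactory } from 'knex';

const knex = knexFactory({ client: 'pg' });
const entityRef = 'component:default/example';
const sourceKey = 'my-provider'; // illustrative provider source key

// Success branch: claim the entity, whatever source referenced it before.
const claimAll = knex('refresh_state_references')
  .where('target_entity_ref', entityRef)
  .delete()
  .toString();

// Failure branch: remove only this provider's own reference.
const dropOwn = knex('refresh_state_references')
  .where('target_entity_ref', entityRef)
  .andWhere({ source_key: sourceKey })
  .delete()
  .toString();

console.log(claimAll);
console.log(dropOwn);
```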
@@ -1 +1 @@
- {"version":3,"file":"DefaultProviderDatabase.cjs.js","sources":["../../src/database/DefaultProviderDatabase.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { stringifyEntityRef } from '@backstage/catalog-model';\nimport { DeferredEntity } from '@backstage/plugin-catalog-node';\nimport { Knex } from 'knex';\nimport lodash from 'lodash';\nimport { v4 as uuid } from 'uuid';\nimport { rethrowError } from './conversion';\nimport { deleteWithEagerPruningOfChildren } from './operations/provider/deleteWithEagerPruningOfChildren';\nimport { refreshByRefreshKeys } from './operations/provider/refreshByRefreshKeys';\nimport { checkLocationKeyConflict } from './operations/refreshState/checkLocationKeyConflict';\nimport { insertUnprocessedEntity } from './operations/refreshState/insertUnprocessedEntity';\nimport { updateUnprocessedEntity } from './operations/refreshState/updateUnprocessedEntity';\nimport { DbRefreshStateReferencesRow, DbRefreshStateRow } from './tables';\nimport {\n ProviderDatabase,\n RefreshByKeyOptions,\n ReplaceUnprocessedEntitiesOptions,\n Transaction,\n} from './types';\nimport { generateStableHash } from './util';\nimport {\n LoggerService,\n isDatabaseConflictError,\n} from '@backstage/backend-plugin-api';\n\n// The number of items that are sent per batch to the database layer, when\n// doing .batchInsert calls to knex. 
This needs to be low enough to not cause\n// errors in the underlying engine due to exceeding query limits, but large\n// enough to get the speed benefits.\nconst BATCH_SIZE = 50;\n\nexport class DefaultProviderDatabase implements ProviderDatabase {\n constructor(\n private readonly options: {\n database: Knex;\n logger: LoggerService;\n },\n ) {}\n\n async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {\n try {\n let result: T | undefined = undefined;\n await this.options.database.transaction(\n async tx => {\n // We can't return here, as knex swallows the return type in case the\n // transaction is rolled back:\n // https://github.com/knex/knex/blob/e37aeaa31c8ef9c1b07d2e4d3ec6607e557d800d/lib/transaction.js#L136\n result = await fn(tx);\n },\n {\n // If we explicitly trigger a rollback, don't fail.\n doNotRejectOnRollback: true,\n },\n );\n return result!;\n } catch (e) {\n this.options.logger.debug(`Error during transaction, ${e}`);\n throw rethrowError(e);\n }\n }\n\n async replaceUnprocessedEntities(\n txOpaque: Knex | Transaction,\n options: ReplaceUnprocessedEntitiesOptions,\n ): Promise<void> {\n const tx = txOpaque as Knex | Knex.Transaction;\n const { toAdd, toUpsert, toRemove } = await this.createDelta(tx, options);\n\n if (toRemove.length) {\n const removedCount = await deleteWithEagerPruningOfChildren({\n knex: tx,\n entityRefs: toRemove,\n sourceKey: options.sourceKey,\n });\n this.options.logger.debug(\n `removed, ${removedCount} entities: ${JSON.stringify(toRemove)}`,\n );\n }\n\n if (toAdd.length) {\n // The reason for this chunking, rather than just massively batch\n // inserting the entire payload, is that we fall back to the individual\n // upsert mechanism below on conflicts. That path is massively slower than\n // the fast batch path, so we don't want to end up accidentally having to\n // for example item-by-item upsert tens of thousands of entities in a\n // large initial delivery dump. The implication is that the size of these\n // chunks needs to weigh the benefit of fast successful inserts, against\n // the drawback of super slow but more rare fallbacks. 
There's quickly\n // diminishing returns though with turning up this value way high.\n for (const chunk of lodash.chunk(toAdd, 50)) {\n try {\n await tx.batchInsert(\n 'refresh_state',\n chunk.map(item => ({\n entity_id: uuid(),\n entity_ref: stringifyEntityRef(item.deferred.entity),\n unprocessed_entity: JSON.stringify(item.deferred.entity),\n unprocessed_hash: item.hash,\n errors: '',\n location_key: item.deferred.locationKey,\n next_update_at: tx.fn.now(),\n last_discovery_at: tx.fn.now(),\n })),\n BATCH_SIZE,\n );\n await tx.batchInsert(\n 'refresh_state_references',\n chunk.map(item => ({\n source_key: options.sourceKey,\n target_entity_ref: stringifyEntityRef(item.deferred.entity),\n })),\n BATCH_SIZE,\n );\n } catch (error) {\n if (!isDatabaseConflictError(error)) {\n throw error;\n } else {\n this.options.logger.debug(\n `Fast insert path failed, falling back to slow path, ${error}`,\n );\n toUpsert.push(...chunk);\n }\n }\n }\n }\n\n if (toUpsert.length) {\n for (const {\n deferred: { entity, locationKey },\n hash,\n } of toUpsert) {\n const entityRef = stringifyEntityRef(entity);\n\n try {\n let ok = await updateUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n });\n if (!ok) {\n ok = await insertUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n logger: this.options.logger,\n });\n }\n\n await tx<DbRefreshStateReferencesRow>('refresh_state_references')\n .where('target_entity_ref', entityRef)\n .andWhere({ source_key: options.sourceKey })\n .delete();\n\n if (ok) {\n await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n ).insert({\n source_key: options.sourceKey,\n target_entity_ref: entityRef,\n });\n } else {\n const conflictingKey = await checkLocationKeyConflict({\n tx,\n entityRef,\n locationKey,\n });\n if (conflictingKey) {\n this.options.logger.warn(\n `Source ${options.sourceKey} detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,\n );\n }\n }\n } catch (error) {\n this.options.logger.error(\n `Failed to add '${entityRef}' from source '${options.sourceKey}', ${error}`,\n );\n }\n }\n }\n }\n\n async refreshByRefreshKeys(\n txOpaque: Transaction,\n options: RefreshByKeyOptions,\n ) {\n const tx = txOpaque as Knex.Transaction;\n await refreshByRefreshKeys({ tx, keys: options.keys });\n }\n\n private async createDelta(\n tx: Knex | Knex.Transaction,\n options: ReplaceUnprocessedEntitiesOptions,\n ): Promise<{\n toAdd: { deferred: DeferredEntity; hash: string }[];\n toUpsert: { deferred: DeferredEntity; hash: string }[];\n toRemove: string[];\n }> {\n if (options.type === 'delta') {\n const toAdd = new Array<{ deferred: DeferredEntity; hash: string }>();\n const toUpsert = new Array<{ deferred: DeferredEntity; hash: string }>();\n const toRemove = options.removed.map(e => e.entityRef);\n\n for (const chunk of lodash.chunk(options.added, 1000)) {\n const entityRefs = chunk.map(e => stringifyEntityRef(e.entity));\n const rows = await tx<DbRefreshStateRow>('refresh_state')\n .select(['entity_ref', 'unprocessed_hash', 'location_key'])\n .whereIn('entity_ref', entityRefs);\n const oldStates = new Map(\n rows.map(row => [\n row.entity_ref,\n {\n unprocessed_hash: row.unprocessed_hash,\n location_key: row.location_key,\n },\n ]),\n );\n\n chunk.forEach((deferred, i) => {\n const entityRef = entityRefs[i];\n const newHash = generateStableHash(deferred.entity);\n const oldState = oldStates.get(entityRef);\n if (oldState === undefined) {\n // Add any entity that does not exist in the 
database\n toAdd.push({ deferred, hash: newHash });\n } else if (\n (deferred.locationKey ?? null) !== (oldState.location_key ?? null)\n ) {\n // Remove and then re-add any entity that exists, but with a different location key\n toRemove.push(entityRef);\n toAdd.push({ deferred, hash: newHash });\n } else if (newHash !== oldState.unprocessed_hash) {\n // Entities with modifications should be pushed through too\n toUpsert.push({ deferred, hash: newHash });\n }\n });\n }\n\n return { toAdd, toUpsert, toRemove };\n }\n\n // Grab all of the existing references from the same source, and their locationKeys as well\n const oldRefs = await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n )\n .leftJoin<DbRefreshStateRow>('refresh_state', {\n target_entity_ref: 'entity_ref',\n })\n .where({ source_key: options.sourceKey })\n .select({\n target_entity_ref: 'refresh_state_references.target_entity_ref',\n location_key: 'refresh_state.location_key',\n unprocessed_hash: 'refresh_state.unprocessed_hash',\n });\n\n const items = options.items.map(deferred => ({\n deferred,\n ref: stringifyEntityRef(deferred.entity),\n hash: generateStableHash(deferred.entity),\n }));\n\n const oldRefsSet = new Map(\n oldRefs.map(r => [\n r.target_entity_ref,\n {\n locationKey: r.location_key,\n oldEntityHash: r.unprocessed_hash,\n },\n ]),\n );\n const newRefsSet = new Set(items.map(item => item.ref));\n\n const toAdd = new Array<{ deferred: DeferredEntity; hash: string }>();\n const toUpsert = new Array<{ deferred: DeferredEntity; hash: string }>();\n const toRemove = oldRefs\n .map(row => row.target_entity_ref)\n .filter(ref => !newRefsSet.has(ref));\n\n for (const item of items) {\n const oldRef = oldRefsSet.get(item.ref);\n const upsertItem = { deferred: item.deferred, hash: item.hash };\n if (!oldRef) {\n // Add any entity that does not exist in the database\n toAdd.push(upsertItem);\n } else if (\n (oldRef.locationKey ?? undefined) !==\n (item.deferred.locationKey ?? 
… (remainder of the previous DefaultProviderDatabase.cjs.js.map omitted; machine-generated single-line JSON)
1
+ (regenerated source map for DefaultProviderDatabase.cjs.js omitted; machine-generated. Its embedded `sourcesContent` carries the updated TypeScript, and the changed section is excerpted below.)
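For readability, here is the TypeScript that the regenerated map embeds for the changed section of `replaceUnprocessedEntities` (this is the `refresh_state_references` cleanup described in the changelog): when an unprocessed-entity upsert succeeds, every existing reference row for that entity is now dropped before the current provider's reference is written back, and when it fails, only the provider's own reference row is removed before the location-key conflict check.

```ts
if (ok) {
  // This source now controls the row: clear out references left behind by
  // processors/providers that no longer control it, then re-add our own.
  await tx<DbRefreshStateReferencesRow>('refresh_state_references')
    .where('target_entity_ref', entityRef)
    .delete();

  await tx<DbRefreshStateReferencesRow>('refresh_state_references').insert({
    source_key: options.sourceKey,
    target_entity_ref: entityRef,
  });
} else {
  // The upsert was rejected (location key mismatch): remove only this
  // source's reference row, then surface the conflict in the logs.
  await tx<DbRefreshStateReferencesRow>('refresh_state_references')
    .where('target_entity_ref', entityRef)
    .andWhere({ source_key: options.sourceKey })
    .delete();

  const conflictingKey = await checkLocationKeyConflict({
    tx,
    entityRef,
    locationKey,
  });
  if (conflictingKey) {
    this.options.logger.warn(
      `Source ${options.sourceKey} detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,
    );
  }
}
```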
package/dist/database/operations/stitcher/performStitching.cjs.js CHANGED
@@ -6,6 +6,7 @@ var uuid = require('uuid');
6
6
  var buildEntitySearch = require('./buildEntitySearch.cjs.js');
7
7
  var markDeferredStitchCompleted = require('./markDeferredStitchCompleted.cjs.js');
8
8
  var util = require('./util.cjs.js');
9
+ var backendPluginApi = require('@backstage/backend-plugin-api');
9
10
 
10
11
  const scriptProtocolPattern = (
11
12
  // eslint-disable-next-line no-control-regex
@@ -20,12 +21,20 @@ async function performStitching(options) {
20
21
  if (!entityResult.length) {
21
22
  return "abandoned";
22
23
  }
23
- await knex("final_entities").insert({
24
- entity_id: entityResult[0].entity_id,
25
- hash: "",
26
- entity_ref: entityRef,
27
- stitch_ticket: stitchTicket
28
- }).onConflict("entity_id").merge(["stitch_ticket"]);
24
+ try {
25
+ await knex("final_entities").insert({
26
+ entity_id: entityResult[0].entity_id,
27
+ hash: "",
28
+ entity_ref: entityRef,
29
+ stitch_ticket: stitchTicket
30
+ }).onConflict("entity_id").merge(["stitch_ticket"]);
31
+ } catch (error) {
32
+ if (backendPluginApi.isDatabaseConflictError(error)) {
33
+ logger.debug(`Skipping stitching of ${entityRef}, conflict`, error);
34
+ return "abandoned";
35
+ }
36
+ throw error;
37
+ }
29
38
  const [processedResult, relationsResult] = await Promise.all([
30
39
  knex.with("incoming_references", function incomingReferences(builder) {
31
40
  return builder.from("refresh_state_references").where({ target_entity_ref: entityRef }).count({ count: "*" });
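Recovered from the regenerated source map below, the TypeScript for this hunk wraps the stitch-ticket upsert in a conflict guard (together with the new `isDatabaseConflictError` import from `@backstage/backend-plugin-api`). Per the changelog, these conflicts are benign and are now logged at debug level instead of failing the stitch:

```ts
// Insert the stitching ticket that will be compared before writing the
// final entity. A conflict can occur when the refresh_state row is deleted
// and re-inserted just after we read its entity_id; the new row will
// trigger its own stitch, so abandoning here is safe.
try {
  await knex<DbFinalEntitiesRow>('final_entities')
    .insert({
      entity_id: entityResult[0].entity_id,
      hash: '',
      entity_ref: entityRef,
      stitch_ticket: stitchTicket,
    })
    .onConflict('entity_id')
    .merge(['stitch_ticket']);
} catch (error) {
  if (isDatabaseConflictError(error)) {
    logger.debug(`Skipping stitching of ${entityRef}, conflict`, error);
    return 'abandoned';
  }
  throw error;
}
```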
package/dist/database/operations/stitcher/performStitching.cjs.js.map CHANGED
@@ -1 +1 @@
1
- (previous source map for performStitching.cjs.js omitted; machine-generated single-line JSON)
1
+ (regenerated source map for performStitching.cjs.js omitted; machine-generated. Its embedded `sourcesContent` matches the TypeScript excerpted above.)
package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  /// <reference types="node" />
2
2
  import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
3
- import { LoggerService, UrlReaderService, DiscoveryService, DatabaseService, RootConfigService, PermissionsService, SchedulerService, AuthService, HttpAuthService } from '@backstage/backend-plugin-api';
3
+ import { LoggerService, UrlReaderService, DiscoveryService, DatabaseService, RootConfigService, PermissionsService, PermissionsRegistryService, SchedulerService, AuthService, HttpAuthService, AuditorService } from '@backstage/backend-plugin-api';
4
4
  import { Entity, EntityPolicy, Validators } from '@backstage/catalog-model';
5
5
  import { ScmIntegrationRegistry } from '@backstage/integration';
6
6
  import { LocationSpec as LocationSpec$1, CatalogEntityDocument, AnalyzeLocationRequest as AnalyzeLocationRequest$1, AnalyzeLocationResponse as AnalyzeLocationResponse$1, AnalyzeLocationExistingEntity as AnalyzeLocationExistingEntity$1, AnalyzeLocationGenerateEntity as AnalyzeLocationGenerateEntity$1, AnalyzeLocationEntityField as AnalyzeLocationEntityField$1 } from '@backstage/plugin-catalog-common';
@@ -10,8 +10,8 @@ import { TokenManager } from '@backstage/backend-common';
10
10
  import { GetEntitiesRequest, CatalogApi } from '@backstage/catalog-client';
11
11
  import { Permission, PermissionRuleParams, PermissionAuthorizer } from '@backstage/plugin-permission-common';
12
12
  import { Router } from 'express';
13
- import { PermissionRule } from '@backstage/plugin-permission-node';
14
13
  import { EventBroker, EventsService } from '@backstage/plugin-events-node';
14
+ import { PermissionRule } from '@backstage/plugin-permission-node';
15
15
  import { CatalogCollatorEntityTransformer as CatalogCollatorEntityTransformer$1 } from '@backstage/plugin-search-backend-module-catalog';
16
16
  import { DocumentCollatorFactory } from '@backstage/plugin-search-common';
17
17
  import { Readable } from 'stream';
@@ -216,10 +216,12 @@ type CatalogEnvironment = {
216
216
  config: RootConfigService;
217
217
  reader: UrlReaderService;
218
218
  permissions: PermissionsService | PermissionAuthorizer;
219
+ permissionsRegistry?: PermissionsRegistryService;
219
220
  scheduler?: SchedulerService;
220
221
  discovery?: DiscoveryService;
221
222
  auth?: AuthService;
222
223
  httpAuth?: HttpAuthService;
224
+ auditor?: AuditorService;
223
225
  };
224
226
  /**
225
227
  * A builder that helps wire up all of the component parts of the catalog.
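Both additions to `CatalogEnvironment` are optional, so existing callers of the legacy builder keep working unchanged. A minimal sketch of passing the new services through, assuming the builder's existing `CatalogBuilder.create(env)` entry point and a host backend that already provides the two services (the variable names here are illustrative, not part of this package):

```ts
import { CatalogBuilder } from '@backstage/plugin-catalog-backend';

const builder = await CatalogBuilder.create({
  ...env, // logger, database, config, reader, permissions, ...
  // New in this release, both optional:
  permissionsRegistry, // PermissionsRegistryService: central permission rule/resource registration
  auditor, // AuditorService: enables audit events for catalog operations
});
```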
package/dist/service/CatalogBuilder.cjs.js CHANGED
@@ -5,6 +5,22 @@ var catalogModel = require('@backstage/catalog-model');
5
5
  var integration = require('@backstage/integration');
6
6
  var crypto = require('crypto');
7
7
  var lodash = require('lodash');
8
+ var config = require('@backstage/config');
9
+ var alpha = require('@backstage/plugin-catalog-common/alpha');
10
+ var pluginPermissionCommon = require('@backstage/plugin-permission-common');
11
+ var pluginPermissionNode = require('@backstage/plugin-permission-node');
12
+ var types = require('@backstage/types');
13
+ var DefaultCatalogDatabase = require('../database/DefaultCatalogDatabase.cjs.js');
14
+ var DefaultProcessingDatabase = require('../database/DefaultProcessingDatabase.cjs.js');
15
+ var DefaultProviderDatabase = require('../database/DefaultProviderDatabase.cjs.js');
16
+ var migrations = require('../database/migrations.cjs.js');
17
+ var CatalogRules = require('../ingestion/CatalogRules.cjs.js');
18
+ var LocationAnalyzer = require('../ingestion/LocationAnalyzer.cjs.js');
19
+ var index = require('../permissions/rules/index.cjs.js');
20
+ var refresh = require('../processing/refresh.cjs.js');
21
+ var connectEntityProviders = require('../processing/connectEntityProviders.cjs.js');
22
+ var DefaultCatalogProcessingEngine = require('../processing/DefaultCatalogProcessingEngine.cjs.js');
23
+ var DefaultCatalogProcessingOrchestrator = require('../processing/DefaultCatalogProcessingOrchestrator.cjs.js');
8
24
  var AnnotateLocationEntityProcessor = require('../processors/AnnotateLocationEntityProcessor.cjs.js');
9
25
  require('git-url-parse');
10
26
  var BuiltinKindsEntityProcessor = require('../processors/BuiltinKindsEntityProcessor.cjs.js');
@@ -16,36 +32,20 @@ var PlaceholderProcessor = require('../processors/PlaceholderProcessor.cjs.js');
16
32
  var UrlReaderProcessor = require('../processors/UrlReaderProcessor.cjs.js');
17
33
  var ConfigLocationEntityProvider = require('../providers/ConfigLocationEntityProvider.cjs.js');
18
34
  var DefaultLocationStore = require('../providers/DefaultLocationStore.cjs.js');
19
- var LocationAnalyzer = require('../ingestion/LocationAnalyzer.cjs.js');
20
- var AuthorizedLocationAnalyzer = require('./AuthorizedLocationAnalyzer.cjs.js');
21
- var parse = require('../util/parse.cjs.js');
22
- var refresh = require('../processing/refresh.cjs.js');
23
- var DefaultProcessingDatabase = require('../database/DefaultProcessingDatabase.cjs.js');
24
- var migrations = require('../database/migrations.cjs.js');
25
- var DefaultCatalogProcessingEngine = require('../processing/DefaultCatalogProcessingEngine.cjs.js');
26
- var DefaultLocationService = require('./DefaultLocationService.cjs.js');
27
- var DefaultEntitiesCatalog = require('./DefaultEntitiesCatalog.cjs.js');
28
- var DefaultCatalogProcessingOrchestrator = require('../processing/DefaultCatalogProcessingOrchestrator.cjs.js');
29
35
  var DefaultStitcher = require('../stitching/DefaultStitcher.cjs.js');
36
+ var parse = require('../util/parse.cjs.js');
37
+ var AuthorizedEntitiesCatalog = require('./AuthorizedEntitiesCatalog.cjs.js');
38
+ var AuthorizedLocationAnalyzer = require('./AuthorizedLocationAnalyzer.cjs.js');
39
+ var AuthorizedLocationService = require('./AuthorizedLocationService.cjs.js');
40
+ var AuthorizedRefreshService = require('./AuthorizedRefreshService.cjs.js');
30
41
  var createRouter = require('./createRouter.cjs.js');
42
+ var DefaultEntitiesCatalog = require('./DefaultEntitiesCatalog.cjs.js');
43
+ var DefaultLocationService = require('./DefaultLocationService.cjs.js');
31
44
  var DefaultRefreshService = require('./DefaultRefreshService.cjs.js');
32
- var AuthorizedRefreshService = require('./AuthorizedRefreshService.cjs.js');
33
- var CatalogRules = require('../ingestion/CatalogRules.cjs.js');
34
- var config = require('@backstage/config');
35
- var connectEntityProviders = require('../processing/connectEntityProviders.cjs.js');
36
- var pluginPermissionCommon = require('@backstage/plugin-permission-common');
37
- var index = require('../permissions/rules/index.cjs.js');
38
- var pluginPermissionNode = require('@backstage/plugin-permission-node');
39
- var AuthorizedEntitiesCatalog = require('./AuthorizedEntitiesCatalog.cjs.js');
40
45
  require('./request/entitiesBatchRequest.cjs.js');
41
46
  var basicEntityFilter = require('./request/basicEntityFilter.cjs.js');
42
47
  require('@backstage/errors');
43
48
  require('./util.cjs.js');
44
- var alpha = require('@backstage/plugin-catalog-common/alpha');
45
- var AuthorizedLocationService = require('./AuthorizedLocationService.cjs.js');
46
- var DefaultProviderDatabase = require('../database/DefaultProviderDatabase.cjs.js');
47
- var DefaultCatalogDatabase = require('../database/DefaultCatalogDatabase.cjs.js');
48
- var types = require('@backstage/types');
49
49
  var process$1 = require('./response/process.cjs.js');
50
50
 
51
51
  function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
@@ -316,7 +316,9 @@ class CatalogBuilder {
316
316
  logger,
317
317
  permissions,
318
318
  scheduler,
319
- discovery = backendCommon.HostDiscovery.fromConfig(config)
319
+ permissionsRegistry,
320
+ discovery = backendCommon.HostDiscovery.fromConfig(config),
321
+ auditor
320
322
  } = this.env;
321
323
  const { auth, httpAuth } = backendCommon.createLegacyAuthAdapters({
322
324
  ...this.env,
@@ -382,7 +384,7 @@ class CatalogBuilder {
382
384
  permissionsService,
383
385
  pluginPermissionNode.createConditionTransformer(this.permissionRules)
384
386
  );
385
- const permissionIntegrationRouter = pluginPermissionNode.createPermissionIntegrationRouter({
387
+ const catalogPermissionResource = {
386
388
  resourceType: alpha.RESOURCE_TYPE_CATALOG_ENTITY,
387
389
  getResources: async (resourceRefs) => {
388
390
  const { entities } = await unauthorizedEntitiesCatalog.entities({
@@ -408,7 +410,15 @@ class CatalogBuilder {
408
410
  },
409
411
  permissions: this.permissions,
410
412
  rules: this.permissionRules
411
- });
413
+ };
414
+ let permissionIntegrationRouter;
415
+ if (permissionsRegistry) {
416
+ permissionsRegistry.addResourceType(catalogPermissionResource);
417
+ } else {
418
+ permissionIntegrationRouter = pluginPermissionNode.createPermissionIntegrationRouter(
419
+ catalogPermissionResource
420
+ );
421
+ }
412
422
  const locationStore = new DefaultLocationStore.DefaultLocationStore(dbClient);
413
423
  const configLocationProvider = new ConfigLocationEntityProvider.ConfigLocationEntityProvider(config);
414
424
  const entityProviders = lodash__default.default.uniqBy(
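The branch above means the builder only mounts its own permission integration router on the legacy path; when a registry is present, the catalog's resource type is registered centrally via `addResourceType`, which is what enables the changelog's custom-permission-rules use case. A hedged sketch of a backend module contributing a rule follows; `coreServices.permissionsRegistry` and `addPermissionRules` are assumptions based on the `PermissionsRegistryService` contract, since the only call confirmed by this diff is `addResourceType`:

```ts
import { coreServices, createBackendModule } from '@backstage/backend-plugin-api';
import { createPermissionRule } from '@backstage/plugin-permission-node';
import { Entity } from '@backstage/catalog-model';

// Hypothetical rule for illustration: matches entities in a fixed namespace.
const isInDemoNamespace = createPermissionRule({
  name: 'IS_IN_DEMO_NAMESPACE',
  description: 'Allow entities in the demo namespace',
  resourceType: 'catalog-entity',
  apply: (resource: Entity) => resource.metadata.namespace === 'demo',
  toQuery: () => ({ key: 'metadata.namespace', values: ['demo'] }),
});

export const catalogModuleDemoRules = createBackendModule({
  pluginId: 'catalog',
  moduleId: 'demo-permission-rules',
  register(reg) {
    reg.registerInit({
      deps: { permissionsRegistry: coreServices.permissionsRegistry },
      async init({ permissionsRegistry }) {
        // addPermissionRules is an assumed method of the registry service.
        permissionsRegistry.addPermissionRules([isInDemoNamespace]);
      },
    });
  },
});
```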
@@ -456,6 +466,7 @@ class CatalogBuilder {
456
466
  auth,
457
467
  httpAuth,
458
468
  permissionsService,
469
+ auditor,
459
470
  disableRelationsCompatibility
460
471
  });
461
472
  await connectEntityProviders.connectEntityProviders(providerDatabase, entityProviders);
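Finally, `auditor` is threaded into `createRouter`, which is what lets the route handlers emit the audit events documented earlier in this README. A sketch of the emission pattern inside a handler; the `createEvent`/`success`/`fail` surface is assumed from the `AuditorService` contract, since this diff only shows the service being passed through:

```ts
import { AuditorService } from '@backstage/backend-plugin-api';
import type { Request, Response } from 'express';

// Stand-in for the real catalog call; illustrative only.
declare function fetchEntities(req: Request): Promise<unknown>;

async function handleGetEntities(
  auditor: AuditorService,
  req: Request,
  res: Response,
) {
  // createEvent/success/fail are assumptions, not shown in this diff.
  const auditEvent = await auditor.createEvent({
    eventId: 'entity-fetch', // one of the event ids documented in this README
    request: req,
    meta: { queryType: 'all' },
  });
  try {
    const entities = await fetchEntities(req);
    await auditEvent.success();
    res.json(entities);
  } catch (error) {
    await auditEvent.fail({ error: error as Error });
    throw error;
  }
}
```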