@backstage/plugin-catalog-backend 1.32.2-next.0 → 2.0.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +110 -0
- package/config.d.ts +9 -0
- package/dist/alpha.cjs.js +1 -5
- package/dist/alpha.cjs.js.map +1 -1
- package/dist/alpha.d.ts +1 -5
- package/dist/database/DefaultProcessingDatabase.cjs.js.map +1 -1
- package/dist/index.cjs.js +0 -16
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +21 -607
- package/dist/processing/DefaultCatalogProcessingOrchestrator.cjs.js.map +1 -1
- package/dist/processing/refresh.cjs.js.map +1 -1
- package/dist/service/CatalogBuilder.cjs.js +10 -13
- package/dist/service/CatalogBuilder.cjs.js.map +1 -1
- package/dist/service/CatalogPlugin.cjs.js +0 -3
- package/dist/service/CatalogPlugin.cjs.js.map +1 -1
- package/dist/service/createRouter.cjs.js.map +1 -1
- package/dist/util/conversion.cjs.js.map +1 -1
- package/dist/util/parse.cjs.js +1 -36
- package/dist/util/parse.cjs.js.map +1 -1
- package/migrations/20250401200503_update_refresh_state_columns.js +43 -0
- package/package.json +17 -22
- package/dist/deprecated.cjs.js +0 -152
- package/dist/deprecated.cjs.js.map +0 -1
- package/dist/processors/LocationEntityProcessor.cjs.js +0 -64
- package/dist/processors/LocationEntityProcessor.cjs.js.map +0 -1
- package/dist/search/DefaultCatalogCollator.cjs.js +0 -79
- package/dist/search/DefaultCatalogCollator.cjs.js.map +0 -1
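The new `migrations/20250401200503_update_refresh_state_columns.js` file relates to the MySQL fix described under 2.0.0-next.1 below: MySQL's TEXT columns cap out at 65,535 bytes, which is too small for location entities that reference many other entities. The migration's contents are not shown in this diff, but a knex migration of this general shape would widen the affected columns; the column names and target types here are assumptions for illustration only.

```ts
import { Knex } from 'knex';

// Illustrative sketch only - the real migration's column list and target types
// are not visible in this diff. The idea is to widen refresh_state columns that
// hold serialized entity data so MySQL can store more than TEXT's 65,535 bytes.
export async function up(knex: Knex): Promise<void> {
  if (knex.client.config.client.includes('mysql')) {
    await knex.schema.alterTable('refresh_state', table => {
      table.text('unprocessed_entity', 'longtext').alter(); // assumed column
      table.text('processed_entity', 'longtext').alter(); // assumed column
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (knex.client.config.client.includes('mysql')) {
    await knex.schema.alterTable('refresh_state', table => {
      table.text('unprocessed_entity').alter();
      table.text('processed_entity').alter();
    });
  }
}
```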
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,115 @@
 # @backstage/plugin-catalog-backend
 
+## 2.0.0-next.2
+
+### Patch Changes
+
+- Updated dependencies
+  - @backstage/integration@1.17.0-next.2
+  - @backstage/backend-openapi-utils@0.5.3-next.1
+  - @backstage/config@1.3.2
+  - @backstage/plugin-permission-node@0.10.0-next.1
+  - @backstage/backend-plugin-api@1.3.1-next.1
+  - @backstage/catalog-client@1.10.0-next.0
+  - @backstage/catalog-model@1.7.3
+  - @backstage/errors@1.2.7
+  - @backstage/types@1.2.1
+  - @backstage/plugin-catalog-common@1.1.4-next.0
+  - @backstage/plugin-catalog-node@1.17.0-next.1
+  - @backstage/plugin-events-node@0.4.11-next.1
+  - @backstage/plugin-permission-common@0.9.0-next.0
+
+## 2.0.0-next.1
+
+### Major Changes
+
+- 90ab044: **BREAKING**: Removed all deprecated exports, and removed support for the old backend system.
+
+  It also removes the `CodeOwnersProcessor` from the default set of processors, because it is expensive to run and has vague semantics. You need to update your backend to add it to the `catalogProcessingExtensionPoint` if you wish to continue using it.
+
+  The following removed exports are available from `@backstage/plugin-catalog-node`:
+
+  - `locationSpecToMetadataName`
+  - `locationSpecToLocationEntity`
+  - `processingResult`
+  - `EntitiesSearchFilter`
+  - `EntityFilter`
+  - `DeferredEntity`
+  - `EntityRelationSpec`
+  - `CatalogProcessor`
+  - `CatalogProcessorParser`
+  - `CatalogProcessorCache`
+  - `CatalogProcessorEmit`
+  - `CatalogProcessorLocationResult`
+  - `CatalogProcessorEntityResult`
+  - `CatalogProcessorRelationResult`
+  - `CatalogProcessorErrorResult`
+  - `CatalogProcessorRefreshKeysResult`
+  - `CatalogProcessorResult`
+  - `EntityProvider`
+  - `EntityProviderConnection`
+  - `EntityProviderMutation`
+  - `AnalyzeOptions`
+  - `LocationAnalyzer`
+  - `ScmLocationAnalyzer`
+  - `PlaceholderResolver`
+  - `PlaceholderResolverParams`
+  - `PlaceholderResolverRead`
+  - `PlaceholderResolverResolveUrl`
+  - `parseEntityYaml`
+
+  The following removed exports are available from `@backstage/plugin-catalog-common`:
+
+  - `LocationSpec`
+  - `AnalyzeLocationRequest`
+  - `AnalyzeLocationResponse`
+  - `AnalyzeLocationExistingEntity`
+  - `AnalyzeLocationGenerateEntity`
+  - `AnalyzeLocationEntityField`
+
+  The following removed exports are instead implemented in the new backend system by `@backstage/plugin-search-backend-module-catalog`:
+
+  - `defaultCatalogCollatorEntityTransformer`
+  - `CatalogCollatorEntityTransformer`
+  - `DefaultCatalogCollator`
+
+  The following exports are removed without a direct replacement:
+
+  - `DefaultCatalogCollatorFactory`
+  - `DefaultCatalogCollatorFactoryOptions`
+  - `LocationEntityProcessor`
+  - `LocationEntityProcessorOptions`
+  - `CatalogBuilder`
+  - `CatalogEnvironment`
+  - `CatalogPermissionRuleInput`
+  - `CatalogProcessingEngine`
+  - `createRandomProcessingInterval`
+  - `ProcessingIntervalFunction`
+
+### Minor Changes
+
+- 6c9b88e: **BREAKING ALPHA**: You can no longer import the catalog plugin from the `/alpha` export; please use the regular root default export instead.
+- d88b922: Adds the ability to disable the default entity processors using a new boolean app config item `catalog.disableDefaultProcessors`.
+
+### Patch Changes
+
+- 0e710fc: This patch addresses an issue identified in Backstage when configured with a MySQL database. If an entity of type location
+  (e.g. all.yaml) has more than 70 referenced entities, clicking "Refresh" does not update the referenced entities as expected. This occurs because the TEXT type in MySQL has a limit of 65,535 bytes, which is insufficient to store all the referenced entities, causing the refresh operation to fail.
+- Updated dependencies
+  - @backstage/plugin-catalog-node@1.17.0-next.1
+  - @backstage/backend-plugin-api@1.3.1-next.1
+  - @backstage/integration@1.16.4-next.1
+  - @backstage/plugin-permission-common@0.9.0-next.0
+  - @backstage/plugin-permission-node@0.10.0-next.1
+  - @backstage/backend-openapi-utils@0.5.3-next.1
+  - @backstage/catalog-client@1.10.0-next.0
+  - @backstage/catalog-model@1.7.3
+  - @backstage/config@1.3.2
+  - @backstage/errors@1.2.7
+  - @backstage/types@1.2.1
+  - @backstage/plugin-catalog-common@1.1.4-next.0
+  - @backstage/plugin-events-node@0.4.11-next.1
+
 ## 1.32.2-next.0
 
 ### Patch Changes

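The 90ab044 entry above drops `CodeOwnersProcessor` from the default processor set. Under the new backend system it can be re-registered through `catalogProcessingExtensionPoint`; the sketch below shows one way to wire that up, assuming the standard module APIs from `@backstage/backend-plugin-api` and `@backstage/plugin-catalog-node/alpha` (the module id and file placement are illustrative, not taken from this diff).

```ts
import {
  coreServices,
  createBackendModule,
} from '@backstage/backend-plugin-api';
import { catalogProcessingExtensionPoint } from '@backstage/plugin-catalog-node/alpha';
import { CodeOwnersProcessor } from '@backstage/plugin-catalog-backend';

// Sketch of a catalog module that re-adds the CodeOwnersProcessor, which is
// no longer part of the default processor set as of 2.0.0-next.1.
export const catalogModuleCodeOwnersProcessor = createBackendModule({
  pluginId: 'catalog',
  moduleId: 'codeowners-processor',
  register(reg) {
    reg.registerInit({
      deps: {
        catalog: catalogProcessingExtensionPoint,
        config: coreServices.rootConfig,
        reader: coreServices.urlReader,
        logger: coreServices.logger,
      },
      async init({ catalog, config, reader, logger }) {
        catalog.addProcessor(
          CodeOwnersProcessor.fromConfig(config, { logger, reader }),
        );
      },
    });
  },
});
```

Such a module would then be added in the backend entry point with `backend.add(catalogModuleCodeOwnersProcessor)`.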
package/config.d.ts
CHANGED
@@ -148,6 +148,15 @@ export interface Config {
      */
     disableRelationsCompatibility?: boolean;
 
+    /**
+     * Disables the default backstage processors.
+     *
+     * Enabling this option allows more complete control of which processors are included
+     * in the backstage processing loop.
+     *
+     */
+    disableDefaultProcessors?: boolean;
+
     /**
      * The strategy to use for entities that are orphaned, i.e. no longer have
      * any other entities or providers referencing them. The default value is

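The new `disableDefaultProcessors` flag added here corresponds to the d88b922 changelog entry, which names the app config key `catalog.disableDefaultProcessors`. How `CatalogBuilder` consumes the flag is not visible in this diff, but reading such a flag through the standard Config API would look roughly like this (the helper name is illustrative):

```ts
import { Config } from '@backstage/config';

// Illustrative helper: true when the deployment has opted out of the
// built-in catalog processors via catalog.disableDefaultProcessors.
export function defaultProcessorsDisabled(config: Config): boolean {
  return (
    config.getOptionalBoolean('catalog.disableDefaultProcessors') ?? false
  );
}
```

With the flag set to `true`, any processors you still want (for example the `CodeOwnersProcessor` module sketched earlier) have to be registered explicitly through `catalogProcessingExtensionPoint`.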
package/dist/alpha.cjs.js
CHANGED
@@ -1,17 +1,13 @@
 'use strict';
 
-Object.defineProperty(exports, '__esModule', { value: true });
-
-var CatalogPlugin = require('./service/CatalogPlugin.cjs.js');
 var conditionExports = require('./permissions/conditionExports.cjs.js');
 var index = require('./permissions/rules/index.cjs.js');
 var util = require('./permissions/rules/util.cjs.js');
 
-
+
 
 exports.catalogConditions = conditionExports.catalogConditions;
 exports.createCatalogConditionalDecision = conditionExports.createCatalogConditionalDecision;
 exports.permissionRules = index.permissionRules;
 exports.createCatalogPermissionRule = util.createCatalogPermissionRule;
-exports.default = _feature;
 //# sourceMappingURL=alpha.cjs.js.map

package/dist/alpha.cjs.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"alpha.cjs.js","sources":[
+{"version":3,"file":"alpha.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;"}

package/dist/alpha.d.ts
CHANGED
@@ -1,4 +1,3 @@
-import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
 import * as _backstage_plugin_permission_common from '@backstage/plugin-permission-common';
 import { PermissionRuleParams } from '@backstage/plugin-permission-common';
 import * as _backstage_plugin_permission_node from '@backstage/plugin-permission-node';
@@ -117,7 +116,4 @@ declare const permissionRules: {
 }>;
 };
 
-
-declare const _feature: _backstage_backend_plugin_api.BackendFeature;
-
-export { type CatalogPermissionRule, catalogConditions, createCatalogConditionalDecision, createCatalogPermissionRule, _feature as default, permissionRules };
+export { type CatalogPermissionRule, catalogConditions, createCatalogConditionalDecision, createCatalogPermissionRule, permissionRules };

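Together with the `alpha.cjs.js` change above, this removes the `/alpha` default export that the 6c9b88e changelog entry refers to. In a typical `packages/backend/src/index.ts` the migration is just an import-path change; a minimal sketch of the usual wiring:

```ts
import { createBackend } from '@backstage/backend-defaults';

const backend = createBackend();

// Before 2.0.0 the catalog plugin was added from the /alpha subpath:
// backend.add(import('@backstage/plugin-catalog-backend/alpha'));

// From 2.0.0 onwards the plugin is the package's root default export:
backend.add(import('@backstage/plugin-catalog-backend'));

backend.start();
```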
package/dist/database/DefaultProcessingDatabase.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DefaultProcessingDatabase.cjs.js","sources":["../../src/database/DefaultProcessingDatabase.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { ConflictError } from '@backstage/errors';\nimport { DeferredEntity } from '@backstage/plugin-catalog-node';\nimport { Knex } from 'knex';\nimport lodash from 'lodash';\nimport { ProcessingIntervalFunction } from '../processing';\nimport { rethrowError, timestampToDateTime } from './conversion';\nimport { initDatabaseMetrics } from './metrics';\nimport {\n DbRefreshKeysRow,\n DbRefreshStateReferencesRow,\n DbRefreshStateRow,\n DbRelationsRow,\n} from './tables';\nimport {\n GetProcessableEntitiesResult,\n ListParentsOptions,\n ListParentsResult,\n ProcessingDatabase,\n RefreshStateItem,\n Transaction,\n UpdateEntityCacheOptions,\n UpdateProcessedEntityOptions,\n} from './types';\nimport { checkLocationKeyConflict } from './operations/refreshState/checkLocationKeyConflict';\nimport { insertUnprocessedEntity } from './operations/refreshState/insertUnprocessedEntity';\nimport { updateUnprocessedEntity } from './operations/refreshState/updateUnprocessedEntity';\nimport { generateStableHash, generateTargetKey } from './util';\nimport {\n EventBroker,\n EventParams,\n EventsService,\n} from '@backstage/plugin-events-node';\nimport { DateTime } from 'luxon';\nimport { CATALOG_CONFLICTS_TOPIC } from '../constants';\nimport { CatalogConflictEventPayload } from '../catalog/types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n// The number of items that are sent per batch to the database layer, when\n// doing .batchInsert calls to knex. 
This needs to be low enough to not cause\n// errors in the underlying engine due to exceeding query limits, but large\n// enough to get the speed benefits.\nconst BATCH_SIZE = 50;\n\nexport class DefaultProcessingDatabase implements ProcessingDatabase {\n constructor(\n private readonly options: {\n database: Knex;\n logger: LoggerService;\n refreshInterval: ProcessingIntervalFunction;\n eventBroker?: EventBroker | EventsService;\n },\n ) {\n initDatabaseMetrics(options.database);\n }\n\n async updateProcessedEntity(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<{ previous: { relations: DbRelationsRow[] } }> {\n const tx = txOpaque as Knex.Transaction;\n const {\n id,\n processedEntity,\n resultHash,\n errors,\n relations,\n deferredEntities,\n refreshKeys,\n locationKey,\n } = options;\n const configClient = tx.client.config.client;\n const refreshResult = await tx<DbRefreshStateRow>('refresh_state')\n .update({\n processed_entity: JSON.stringify(processedEntity),\n result_hash: resultHash,\n errors,\n location_key: locationKey,\n })\n .where('entity_id', id)\n .andWhere(inner => {\n if (!locationKey) {\n return inner.whereNull('location_key');\n }\n return inner\n .where('location_key', locationKey)\n .orWhereNull('location_key');\n });\n if (refreshResult === 0) {\n throw new ConflictError(\n `Conflicting write of processing result for ${id} with location key '${locationKey}'`,\n );\n }\n const sourceEntityRef = stringifyEntityRef(processedEntity);\n\n // Schedule all deferred entities for future processing.\n await this.addUnprocessedEntities(tx, {\n entities: deferredEntities,\n sourceEntityRef,\n });\n\n // Delete old relations\n // NOTE(freben): knex implemented support for returning() on update queries for sqlite, but at the current time of writing (Sep 2022) not for delete() queries.\n let previousRelationRows: DbRelationsRow[];\n if (configClient.includes('sqlite3') || configClient.includes('mysql')) {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .select('*')\n .where({ originating_entity_id: id });\n await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete();\n } else {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete()\n .returning('*');\n }\n\n // Batch insert new relations\n const relationRows: DbRelationsRow[] = relations.map(\n ({ source, target, type }) => ({\n originating_entity_id: id,\n source_entity_ref: stringifyEntityRef(source),\n target_entity_ref: stringifyEntityRef(target),\n type,\n }),\n );\n\n await tx.batchInsert(\n 'relations',\n this.deduplicateRelations(relationRows),\n BATCH_SIZE,\n );\n\n // Delete old refresh keys\n await tx<DbRefreshKeysRow>('refresh_keys')\n .where({ entity_id: id })\n .delete();\n\n // Insert the refresh keys for the processed entity\n await tx.batchInsert(\n 'refresh_keys',\n refreshKeys.map(k => ({\n entity_id: id,\n key: generateTargetKey(k.key),\n })),\n BATCH_SIZE,\n );\n\n return {\n previous: {\n relations: previousRelationRows,\n },\n };\n }\n\n async updateProcessedEntityErrors(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, errors, resultHash } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({\n errors,\n result_hash: resultHash,\n })\n .where('entity_id', id);\n }\n\n async updateEntityCache(\n txOpaque: Transaction,\n options: UpdateEntityCacheOptions,\n ): 
Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, state } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({ cache: JSON.stringify(state ?? {}) })\n .where('entity_id', id);\n }\n\n async getProcessableEntities(\n maybeTx: Transaction | Knex,\n request: { processBatchSize: number },\n ): Promise<GetProcessableEntitiesResult> {\n const knex = maybeTx as Knex.Transaction | Knex;\n\n let itemsQuery = knex<DbRefreshStateRow>('refresh_state').select([\n 'entity_id',\n 'entity_ref',\n 'unprocessed_entity',\n 'result_hash',\n 'cache',\n 'errors',\n 'location_key',\n 'next_update_at',\n ]);\n\n // This avoids duplication of work because of race conditions and is\n // also fast because locked rows are ignored rather than blocking.\n // It's only available in MySQL and PostgreSQL\n if (['mysql', 'mysql2', 'pg'].includes(knex.client.config.client)) {\n itemsQuery = itemsQuery.forUpdate().skipLocked();\n }\n\n const items = await itemsQuery\n .where('next_update_at', '<=', knex.fn.now())\n .limit(request.processBatchSize)\n .orderBy('next_update_at', 'asc');\n\n const interval = this.options.refreshInterval();\n\n const nextUpdateAt = (refreshInterval: number) => {\n if (knex.client.config.client.includes('sqlite3')) {\n return knex.raw(`datetime('now', ?)`, [`${refreshInterval} seconds`]);\n } else if (knex.client.config.client.includes('mysql')) {\n return knex.raw(`now() + interval ${refreshInterval} second`);\n }\n return knex.raw(`now() + interval '${refreshInterval} seconds'`);\n };\n\n await knex<DbRefreshStateRow>('refresh_state')\n .whereIn(\n 'entity_ref',\n items.map(i => i.entity_ref),\n )\n .update({\n next_update_at: nextUpdateAt(interval),\n });\n\n return {\n items: items.map(\n i =>\n ({\n id: i.entity_id,\n entityRef: i.entity_ref,\n unprocessedEntity: JSON.parse(i.unprocessed_entity) as Entity,\n resultHash: i.result_hash || '',\n nextUpdateAt: timestampToDateTime(i.next_update_at),\n state: i.cache ? 
JSON.parse(i.cache) : undefined,\n errors: i.errors,\n locationKey: i.location_key,\n } satisfies RefreshStateItem),\n ),\n };\n }\n\n async listParents(\n txOpaque: Transaction,\n options: ListParentsOptions,\n ): Promise<ListParentsResult> {\n const tx = txOpaque as Knex.Transaction;\n\n const rows = await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n )\n .whereIn('target_entity_ref', options.entityRefs)\n .select();\n\n const entityRefs = rows.map(r => r.source_entity_ref!).filter(Boolean);\n\n return { entityRefs };\n }\n\n async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {\n try {\n let result: T | undefined = undefined;\n\n await this.options.database.transaction(\n async tx => {\n // We can't return here, as knex swallows the return type in case the transaction is rolled back:\n // https://github.com/knex/knex/blob/e37aeaa31c8ef9c1b07d2e4d3ec6607e557d800d/lib/transaction.js#L136\n result = await fn(tx);\n },\n {\n // If we explicitly trigger a rollback, don't fail.\n doNotRejectOnRollback: true,\n },\n );\n\n return result!;\n } catch (e) {\n this.options.logger.debug(`Error during transaction, ${e}`);\n throw rethrowError(e);\n }\n }\n\n private deduplicateRelations(rows: DbRelationsRow[]): DbRelationsRow[] {\n return lodash.uniqBy(\n rows,\n r => `${r.source_entity_ref}:${r.target_entity_ref}:${r.type}`,\n );\n }\n\n /**\n * Add a set of deferred entities for processing.\n * The entities will be added at the front of the processing queue.\n */\n private async addUnprocessedEntities(\n txOpaque: Transaction,\n options: {\n sourceEntityRef: string;\n entities: DeferredEntity[];\n },\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n\n // Keeps track of the entities that we end up inserting to update refresh_state_references afterwards\n const stateReferences = new Array<string>();\n\n // Upsert all of the unprocessed entities into the refresh_state table, by\n // their entity ref.\n for (const { entity, locationKey } of options.entities) {\n const entityRef = stringifyEntityRef(entity);\n const hash = generateStableHash(entity);\n\n const updated = await updateUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n });\n if (updated) {\n stateReferences.push(entityRef);\n continue;\n }\n\n const inserted = await insertUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n logger: this.options.logger,\n });\n if (inserted) {\n stateReferences.push(entityRef);\n continue;\n }\n\n // If the row can't be inserted, we have a conflict, but it could be either\n // because of a conflicting locationKey or a race with another instance, so check\n // whether the conflicting entity has the same entityRef but a different locationKey\n const conflictingKey = await checkLocationKeyConflict({\n tx,\n entityRef,\n locationKey,\n });\n if (conflictingKey) {\n this.options.logger.warn(\n `Detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,\n );\n if (this.options.eventBroker && locationKey) {\n const eventParams: EventParams<CatalogConflictEventPayload> = {\n topic: CATALOG_CONFLICTS_TOPIC,\n eventPayload: {\n unprocessedEntity: entity,\n entityRef,\n newLocationKey: locationKey,\n existingLocationKey: conflictingKey,\n lastConflictAt: DateTime.now().toISO()!,\n },\n };\n await this.options.eventBroker?.publish(eventParams);\n }\n }\n }\n\n // Lastly, replace refresh state references for the originating entity and any successfully added entities\n await 
tx<DbRefreshStateReferencesRow>('refresh_state_references')\n // Remove all existing references from the originating entity\n .where({ source_entity_ref: options.sourceEntityRef })\n // And remove any existing references to entities that we're inserting new references for\n .orWhereIn('target_entity_ref', stateReferences)\n .delete();\n await tx.batchInsert(\n 'refresh_state_references',\n stateReferences.map(entityRef => ({\n source_entity_ref: options.sourceEntityRef,\n target_entity_ref: entityRef,\n })),\n BATCH_SIZE,\n );\n }\n}\n"],"names":["initDatabaseMetrics","errors","ConflictError","stringifyEntityRef","generateTargetKey","timestampToDateTime","rethrowError","lodash","generateStableHash","updateUnprocessedEntity","insertUnprocessedEntity","checkLocationKeyConflict","CATALOG_CONFLICTS_TOPIC","DateTime"],"mappings":";;;;;;;;;;;;;;;;;;AA0DA,MAAM,UAAa,GAAA,EAAA;AAEZ,MAAM,yBAAwD,CAAA;AAAA,EACnE,YACmB,OAMjB,EAAA;AANiB,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AAOjB,IAAAA,2BAAA,CAAoB,QAAQ,QAAQ,CAAA;AAAA;AACtC,EAEA,MAAM,qBACJ,CAAA,QAAA,EACA,OACwD,EAAA;AACxD,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA;AAAA,MACJ,EAAA;AAAA,MACA,eAAA;AAAA,MACA,UAAA;AAAA,cACAC,QAAA;AAAA,MACA,SAAA;AAAA,MACA,gBAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACE,GAAA,OAAA;AACJ,IAAM,MAAA,YAAA,GAAe,EAAG,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA;AACtC,IAAA,MAAM,aAAgB,GAAA,MAAM,EAAsB,CAAA,eAAe,EAC9D,MAAO,CAAA;AAAA,MACN,gBAAA,EAAkB,IAAK,CAAA,SAAA,CAAU,eAAe,CAAA;AAAA,MAChD,WAAa,EAAA,UAAA;AAAA,cACbA,QAAA;AAAA,MACA,YAAc,EAAA;AAAA,KACf,CACA,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA,CACrB,SAAS,CAAS,KAAA,KAAA;AACjB,MAAA,IAAI,CAAC,WAAa,EAAA;AAChB,QAAO,OAAA,KAAA,CAAM,UAAU,cAAc,CAAA;AAAA;AAEvC,MAAA,OAAO,MACJ,KAAM,CAAA,cAAA,EAAgB,WAAW,CAAA,CACjC,YAAY,cAAc,CAAA;AAAA,KAC9B,CAAA;AACH,IAAA,IAAI,kBAAkB,CAAG,EAAA;AACvB,MAAA,MAAM,IAAIC,oBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,EAAE,CAAA,oBAAA,EAAuB,WAAW,CAAA,CAAA;AAAA,OACpF;AAAA;AAEF,IAAM,MAAA,eAAA,GAAkBC,gCAAmB,eAAe,CAAA;AAG1D,IAAM,MAAA,IAAA,CAAK,uBAAuB,EAAI,EAAA;AAAA,MACpC,QAAU,EAAA,gBAAA;AAAA,MACV;AAAA,KACD,CAAA;AAID,IAAI,IAAA,oBAAA;AACJ,IAAA,IAAI,aAAa,QAAS,CAAA,SAAS,KAAK,YAAa,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtE,MAAuB,oBAAA,GAAA,MAAM,EAAmB,CAAA,WAAW,CACxD,CAAA,MAAA,CAAO,GAAG,CAAA,CACV,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA;AACtC,MAAM,MAAA,EAAA,CAAmB,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA;AAAA,KACL,MAAA;AACL,MAAA,oBAAA,GAAuB,MAAM,EAAA,CAAmB,WAAW,CAAA,CACxD,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA,CACP,UAAU,GAAG,CAAA;AAAA;AAIlB,IAAA,MAAM,eAAiC,SAAU,CAAA,GAAA;AAAA,MAC/C,CAAC,EAAE,MAAQ,EAAA,MAAA,EAAQ,MAAY,MAAA;AAAA,QAC7B,qBAAuB,EAAA,EAAA;AAAA,QACvB,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C;AAAA,OACF;AAAA,KACF;AAEA,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,WAAA;AAAA,MACA,IAAA,CAAK,qBAAqB,YAAY,CAAA;AAAA,MACtC;AAAA,KACF;AAGA,IAAM,MAAA,EAAA,CAAqB,cAAc,CACtC,CAAA,KAAA,CAAM,EAAE,SAAW,EAAA,EAAA,EAAI,CAAA,CACvB,MAAO,EAAA;AAGV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,cAAA;AAAA,MACA,WAAA,CAAY,IAAI,CAAM,CAAA,MAAA;AAAA,QACpB,SAAW,EAAA,EAAA;AAAA,QACX,GAAA,EAAKC,sBAAkB,CAAA,CAAA,CAAE,GAAG;AAAA,OAC5B,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAEA,IAAO,OAAA;AAAA,MACL,QAAU,EAAA;AAAA,QACR,SAAW,EAAA;AAAA;AACb,KACF;AAAA;AACF,EAEA,MAAM,2BACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAA,MAAM,EAAE,EAAA,EAAI,MAAQ,EAAA,UAAA,EAAe,GAAA,OAAA;AAEnC,IAAM,MAAA,EAAA,CAAsB,eAAe,CAAA,CACxC,MAAO,CAAA;AAAA,MACN,MAAA;AAAA,MACA,WAAa,EAAA;AAAA,KACd,CAAA,CACA,KAAM,CAAA,WAAA,EAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,iBACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA,EAAE,EAAI,EA
AA,KAAA,EAAU,GAAA,OAAA;AAEtB,IAAA,MAAM,GAAsB,eAAe,CAAA,CACxC,MAAO,CAAA,EAAE,OAAO,IAAK,CAAA,SAAA,CAAU,KAAS,IAAA,EAAE,CAAE,EAAC,CAC7C,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,sBACJ,CAAA,OAAA,EACA,OACuC,EAAA;AACvC,IAAA,MAAM,IAAO,GAAA,OAAA;AAEb,IAAA,IAAI,UAAa,GAAA,IAAA,CAAwB,eAAe,CAAA,CAAE,MAAO,CAAA;AAAA,MAC/D,WAAA;AAAA,MACA,YAAA;AAAA,MACA,oBAAA;AAAA,MACA,aAAA;AAAA,MACA,OAAA;AAAA,MACA,QAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACD,CAAA;AAKD,IAAI,IAAA,CAAC,OAAS,EAAA,QAAA,EAAU,IAAI,CAAA,CAAE,SAAS,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAM,CAAG,EAAA;AACjE,MAAa,UAAA,GAAA,UAAA,CAAW,SAAU,EAAA,CAAE,UAAW,EAAA;AAAA;AAGjD,IAAA,MAAM,QAAQ,MAAM,UAAA,CACjB,KAAM,CAAA,gBAAA,EAAkB,MAAM,IAAK,CAAA,EAAA,CAAG,GAAI,EAAC,EAC3C,KAAM,CAAA,OAAA,CAAQ,gBAAgB,CAC9B,CAAA,OAAA,CAAQ,kBAAkB,KAAK,CAAA;AAElC,IAAM,MAAA,QAAA,GAAW,IAAK,CAAA,OAAA,CAAQ,eAAgB,EAAA;AAE9C,IAAM,MAAA,YAAA,GAAe,CAAC,eAA4B,KAAA;AAChD,MAAA,IAAI,KAAK,MAAO,CAAA,MAAA,CAAO,MAAO,CAAA,QAAA,CAAS,SAAS,CAAG,EAAA;AACjD,QAAA,OAAO,KAAK,GAAI,CAAA,CAAA,kBAAA,CAAA,EAAsB,CAAC,CAAG,EAAA,eAAe,UAAU,CAAC,CAAA;AAAA,iBAC3D,IAAK,CAAA,MAAA,CAAO,OAAO,MAAO,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtD,QAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAoB,iBAAA,EAAA,eAAe,CAAS,OAAA,CAAA,CAAA;AAAA;AAE9D,MAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAqB,kBAAA,EAAA,eAAe,CAAW,SAAA,CAAA,CAAA;AAAA,KACjE;AAEA,IAAM,MAAA,IAAA,CAAwB,eAAe,CAC1C,CAAA,OAAA;AAAA,MACC,YAAA;AAAA,MACA,KAAM,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,UAAU;AAAA,MAE5B,MAAO,CAAA;AAAA,MACN,cAAA,EAAgB,aAAa,QAAQ;AAAA,KACtC,CAAA;AAEH,IAAO,OAAA;AAAA,MACL,OAAO,KAAM,CAAA,GAAA;AAAA,QACX,CACG,CAAA,MAAA;AAAA,UACC,IAAI,CAAE,CAAA,SAAA;AAAA,UACN,WAAW,CAAE,CAAA,UAAA;AAAA,UACb,iBAAmB,EAAA,IAAA,CAAK,KAAM,CAAA,CAAA,CAAE,kBAAkB,CAAA;AAAA,UAClD,UAAA,EAAY,EAAE,WAAe,IAAA,EAAA;AAAA,UAC7B,YAAA,EAAcC,8BAAoB,CAAA,CAAA,CAAE,cAAc,CAAA;AAAA,UAClD,OAAO,CAAE,CAAA,KAAA,GAAQ,KAAK,KAAM,CAAA,CAAA,CAAE,KAAK,CAAI,GAAA,KAAA,CAAA;AAAA,UACvC,QAAQ,CAAE,CAAA,MAAA;AAAA,UACV,aAAa,CAAE,CAAA;AAAA,SACjB;AAAA;AACJ,KACF;AAAA;AACF,EAEA,MAAM,WACJ,CAAA,QAAA,EACA,OAC4B,EAAA;AAC5B,IAAA,MAAM,EAAK,GAAA,QAAA;AAEX,IAAA,MAAM,OAAO,MAAM,EAAA;AAAA,MACjB;AAAA,MAEC,OAAQ,CAAA,mBAAA,EAAqB,OAAQ,CAAA,UAAU,EAC/C,MAAO,EAAA;AAEV,IAAM,MAAA,UAAA,GAAa,KAAK,GAAI,CAAA,CAAA,CAAA,KAAK,EAAE,iBAAkB,CAAA,CAAE,OAAO,OAAO,CAAA;AAErE,IAAA,OAAO,EAAE,UAAW,EAAA;AAAA;AACtB,EAEA,MAAM,YAAe,EAAiD,EAAA;AACpE,IAAI,IAAA;AACF,MAAA,IAAI,MAAwB,GAAA,KAAA,CAAA;AAE5B,MAAM,MAAA,IAAA,CAAK,QAAQ,QAAS,CAAA,WAAA;AAAA,QAC1B,OAAM,EAAM,KAAA;AAGV,UAAS,MAAA,GAAA,MAAM,GAAG,EAAE,CAAA;AAAA,SACtB;AAAA,QACA;AAAA;AAAA,UAEE,qBAAuB,EAAA;AAAA;AACzB,OACF;AAEA,MAAO,OAAA,MAAA;AAAA,aACA,CAAG,EAAA;AACV,MAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,CAAO,KAAM,CAAA,CAAA,0BAAA,EAA6B,CAAC,CAAE,CAAA,CAAA;AAC1D,MAAA,MAAMC,wBAAa,CAAC,CAAA;AAAA;AACtB;AACF,EAEQ,qBAAqB,IAA0C,EAAA;AACrE,IAAA,OAAOC,uBAAO,CAAA,MAAA;AAAA,MACZ,IAAA;AAAA,MACA,CAAA,CAAA,KAAK,GAAG,CAAE,CAAA,iBAAiB,IAAI,CAAE,CAAA,iBAAiB,CAAI,CAAA,EAAA,CAAA,CAAE,IAAI,CAAA;AAAA,KAC9D;AAAA;AACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBACZ,CAAA,QAAA,EACA,OAIe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AAGX,IAAM,MAAA,eAAA,GAAkB,IAAI,KAAc,EAAA;AAI1C,IAAA,KAAA,MAAW,EAAE,MAAA,EAAQ,WAAY,EAAA,IAAK,QAAQ,QAAU,EAAA;AACtD,MAAM,MAAA,SAAA,GAAYJ,gCAAmB,MAAM,CAAA;AAC3C,MAAM,MAAA,IAAA,GAAOK,wBAAmB,MAAM,CAAA;AAEtC,MAAM,MAAA,OAAA,GAAU,MAAMC,+CAAwB,CAAA;AAAA,QAC5C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,OAAS,EAAA;AACX,QAAA,eAAA,CAAgB,KAAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAGF,MAAM,MAAA,QAAA,GAAW,MAAMC,+CAAwB,CAAA;AAAA,QAC7C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA,EAAQ,KAAK,OAAQ,CAAA;AAAA,OACtB,CAAA;AACD,MAAA,IAAI,QAAU,EAAA;AACZ,QAAA,eAAA,CAAgB,KAAK,S
AAS,CAAA;AAC9B,QAAA;AAAA;AAMF,MAAM,MAAA,cAAA,GAAiB,MAAMC,iDAAyB,CAAA;AAAA,QACpD,EAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,cAAgB,EAAA;AAClB,QAAA,IAAA,CAAK,QAAQ,MAAO,CAAA,IAAA;AAAA,UAClB,CAAkC,+BAAA,EAAA,SAAS,CAA0B,uBAAA,EAAA,cAAc,iBAAiB,WAAW,CAAA;AAAA,SACjH;AACA,QAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,WAAA,IAAe,WAAa,EAAA;AAC3C,UAAA,MAAM,WAAwD,GAAA;AAAA,YAC5D,KAAO,EAAAC,iCAAA;AAAA,YACP,YAAc,EAAA;AAAA,cACZ,iBAAmB,EAAA,MAAA;AAAA,cACnB,SAAA;AAAA,cACA,cAAgB,EAAA,WAAA;AAAA,cAChB,mBAAqB,EAAA,cAAA;AAAA,cACrB,cAAgB,EAAAC,cAAA,CAAS,GAAI,EAAA,CAAE,KAAM;AAAA;AACvC,WACF;AACA,UAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,WAAa,EAAA,OAAA,CAAQ,WAAW,CAAA;AAAA;AACrD;AACF;AAIF,IAAA,MAAM,EAAgC,CAAA,0BAA0B,CAE7D,CAAA,KAAA,CAAM,EAAE,iBAAmB,EAAA,OAAA,CAAQ,eAAgB,EAAC,CAEpD,CAAA,SAAA,CAAU,mBAAqB,EAAA,eAAe,EAC9C,MAAO,EAAA;AACV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,0BAAA;AAAA,MACA,eAAA,CAAgB,IAAI,CAAc,SAAA,MAAA;AAAA,QAChC,mBAAmB,OAAQ,CAAA,eAAA;AAAA,QAC3B,iBAAmB,EAAA;AAAA,OACnB,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAAA;AAEJ;;;;"}
+
{"version":3,"file":"DefaultProcessingDatabase.cjs.js","sources":["../../src/database/DefaultProcessingDatabase.ts"],"sourcesContent":["/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { ConflictError } from '@backstage/errors';\nimport { DeferredEntity } from '@backstage/plugin-catalog-node';\nimport { Knex } from 'knex';\nimport lodash from 'lodash';\nimport { ProcessingIntervalFunction } from '../processing/refresh';\nimport { rethrowError, timestampToDateTime } from './conversion';\nimport { initDatabaseMetrics } from './metrics';\nimport {\n DbRefreshKeysRow,\n DbRefreshStateReferencesRow,\n DbRefreshStateRow,\n DbRelationsRow,\n} from './tables';\nimport {\n GetProcessableEntitiesResult,\n ListParentsOptions,\n ListParentsResult,\n ProcessingDatabase,\n RefreshStateItem,\n Transaction,\n UpdateEntityCacheOptions,\n UpdateProcessedEntityOptions,\n} from './types';\nimport { checkLocationKeyConflict } from './operations/refreshState/checkLocationKeyConflict';\nimport { insertUnprocessedEntity } from './operations/refreshState/insertUnprocessedEntity';\nimport { updateUnprocessedEntity } from './operations/refreshState/updateUnprocessedEntity';\nimport { generateStableHash, generateTargetKey } from './util';\nimport {\n EventBroker,\n EventParams,\n EventsService,\n} from '@backstage/plugin-events-node';\nimport { DateTime } from 'luxon';\nimport { CATALOG_CONFLICTS_TOPIC } from '../constants';\nimport { CatalogConflictEventPayload } from '../catalog/types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\n// The number of items that are sent per batch to the database layer, when\n// doing .batchInsert calls to knex. 
This needs to be low enough to not cause\n// errors in the underlying engine due to exceeding query limits, but large\n// enough to get the speed benefits.\nconst BATCH_SIZE = 50;\n\nexport class DefaultProcessingDatabase implements ProcessingDatabase {\n constructor(\n private readonly options: {\n database: Knex;\n logger: LoggerService;\n refreshInterval: ProcessingIntervalFunction;\n eventBroker?: EventBroker | EventsService;\n },\n ) {\n initDatabaseMetrics(options.database);\n }\n\n async updateProcessedEntity(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<{ previous: { relations: DbRelationsRow[] } }> {\n const tx = txOpaque as Knex.Transaction;\n const {\n id,\n processedEntity,\n resultHash,\n errors,\n relations,\n deferredEntities,\n refreshKeys,\n locationKey,\n } = options;\n const configClient = tx.client.config.client;\n const refreshResult = await tx<DbRefreshStateRow>('refresh_state')\n .update({\n processed_entity: JSON.stringify(processedEntity),\n result_hash: resultHash,\n errors,\n location_key: locationKey,\n })\n .where('entity_id', id)\n .andWhere(inner => {\n if (!locationKey) {\n return inner.whereNull('location_key');\n }\n return inner\n .where('location_key', locationKey)\n .orWhereNull('location_key');\n });\n if (refreshResult === 0) {\n throw new ConflictError(\n `Conflicting write of processing result for ${id} with location key '${locationKey}'`,\n );\n }\n const sourceEntityRef = stringifyEntityRef(processedEntity);\n\n // Schedule all deferred entities for future processing.\n await this.addUnprocessedEntities(tx, {\n entities: deferredEntities,\n sourceEntityRef,\n });\n\n // Delete old relations\n // NOTE(freben): knex implemented support for returning() on update queries for sqlite, but at the current time of writing (Sep 2022) not for delete() queries.\n let previousRelationRows: DbRelationsRow[];\n if (configClient.includes('sqlite3') || configClient.includes('mysql')) {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .select('*')\n .where({ originating_entity_id: id });\n await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete();\n } else {\n previousRelationRows = await tx<DbRelationsRow>('relations')\n .where({ originating_entity_id: id })\n .delete()\n .returning('*');\n }\n\n // Batch insert new relations\n const relationRows: DbRelationsRow[] = relations.map(\n ({ source, target, type }) => ({\n originating_entity_id: id,\n source_entity_ref: stringifyEntityRef(source),\n target_entity_ref: stringifyEntityRef(target),\n type,\n }),\n );\n\n await tx.batchInsert(\n 'relations',\n this.deduplicateRelations(relationRows),\n BATCH_SIZE,\n );\n\n // Delete old refresh keys\n await tx<DbRefreshKeysRow>('refresh_keys')\n .where({ entity_id: id })\n .delete();\n\n // Insert the refresh keys for the processed entity\n await tx.batchInsert(\n 'refresh_keys',\n refreshKeys.map(k => ({\n entity_id: id,\n key: generateTargetKey(k.key),\n })),\n BATCH_SIZE,\n );\n\n return {\n previous: {\n relations: previousRelationRows,\n },\n };\n }\n\n async updateProcessedEntityErrors(\n txOpaque: Transaction,\n options: UpdateProcessedEntityOptions,\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, errors, resultHash } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({\n errors,\n result_hash: resultHash,\n })\n .where('entity_id', id);\n }\n\n async updateEntityCache(\n txOpaque: Transaction,\n options: UpdateEntityCacheOptions,\n ): 
Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n const { id, state } = options;\n\n await tx<DbRefreshStateRow>('refresh_state')\n .update({ cache: JSON.stringify(state ?? {}) })\n .where('entity_id', id);\n }\n\n async getProcessableEntities(\n maybeTx: Transaction | Knex,\n request: { processBatchSize: number },\n ): Promise<GetProcessableEntitiesResult> {\n const knex = maybeTx as Knex.Transaction | Knex;\n\n let itemsQuery = knex<DbRefreshStateRow>('refresh_state').select([\n 'entity_id',\n 'entity_ref',\n 'unprocessed_entity',\n 'result_hash',\n 'cache',\n 'errors',\n 'location_key',\n 'next_update_at',\n ]);\n\n // This avoids duplication of work because of race conditions and is\n // also fast because locked rows are ignored rather than blocking.\n // It's only available in MySQL and PostgreSQL\n if (['mysql', 'mysql2', 'pg'].includes(knex.client.config.client)) {\n itemsQuery = itemsQuery.forUpdate().skipLocked();\n }\n\n const items = await itemsQuery\n .where('next_update_at', '<=', knex.fn.now())\n .limit(request.processBatchSize)\n .orderBy('next_update_at', 'asc');\n\n const interval = this.options.refreshInterval();\n\n const nextUpdateAt = (refreshInterval: number) => {\n if (knex.client.config.client.includes('sqlite3')) {\n return knex.raw(`datetime('now', ?)`, [`${refreshInterval} seconds`]);\n } else if (knex.client.config.client.includes('mysql')) {\n return knex.raw(`now() + interval ${refreshInterval} second`);\n }\n return knex.raw(`now() + interval '${refreshInterval} seconds'`);\n };\n\n await knex<DbRefreshStateRow>('refresh_state')\n .whereIn(\n 'entity_ref',\n items.map(i => i.entity_ref),\n )\n .update({\n next_update_at: nextUpdateAt(interval),\n });\n\n return {\n items: items.map(\n i =>\n ({\n id: i.entity_id,\n entityRef: i.entity_ref,\n unprocessedEntity: JSON.parse(i.unprocessed_entity) as Entity,\n resultHash: i.result_hash || '',\n nextUpdateAt: timestampToDateTime(i.next_update_at),\n state: i.cache ? 
JSON.parse(i.cache) : undefined,\n errors: i.errors,\n locationKey: i.location_key,\n } satisfies RefreshStateItem),\n ),\n };\n }\n\n async listParents(\n txOpaque: Transaction,\n options: ListParentsOptions,\n ): Promise<ListParentsResult> {\n const tx = txOpaque as Knex.Transaction;\n\n const rows = await tx<DbRefreshStateReferencesRow>(\n 'refresh_state_references',\n )\n .whereIn('target_entity_ref', options.entityRefs)\n .select();\n\n const entityRefs = rows.map(r => r.source_entity_ref!).filter(Boolean);\n\n return { entityRefs };\n }\n\n async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {\n try {\n let result: T | undefined = undefined;\n\n await this.options.database.transaction(\n async tx => {\n // We can't return here, as knex swallows the return type in case the transaction is rolled back:\n // https://github.com/knex/knex/blob/e37aeaa31c8ef9c1b07d2e4d3ec6607e557d800d/lib/transaction.js#L136\n result = await fn(tx);\n },\n {\n // If we explicitly trigger a rollback, don't fail.\n doNotRejectOnRollback: true,\n },\n );\n\n return result!;\n } catch (e) {\n this.options.logger.debug(`Error during transaction, ${e}`);\n throw rethrowError(e);\n }\n }\n\n private deduplicateRelations(rows: DbRelationsRow[]): DbRelationsRow[] {\n return lodash.uniqBy(\n rows,\n r => `${r.source_entity_ref}:${r.target_entity_ref}:${r.type}`,\n );\n }\n\n /**\n * Add a set of deferred entities for processing.\n * The entities will be added at the front of the processing queue.\n */\n private async addUnprocessedEntities(\n txOpaque: Transaction,\n options: {\n sourceEntityRef: string;\n entities: DeferredEntity[];\n },\n ): Promise<void> {\n const tx = txOpaque as Knex.Transaction;\n\n // Keeps track of the entities that we end up inserting to update refresh_state_references afterwards\n const stateReferences = new Array<string>();\n\n // Upsert all of the unprocessed entities into the refresh_state table, by\n // their entity ref.\n for (const { entity, locationKey } of options.entities) {\n const entityRef = stringifyEntityRef(entity);\n const hash = generateStableHash(entity);\n\n const updated = await updateUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n });\n if (updated) {\n stateReferences.push(entityRef);\n continue;\n }\n\n const inserted = await insertUnprocessedEntity({\n tx,\n entity,\n hash,\n locationKey,\n logger: this.options.logger,\n });\n if (inserted) {\n stateReferences.push(entityRef);\n continue;\n }\n\n // If the row can't be inserted, we have a conflict, but it could be either\n // because of a conflicting locationKey or a race with another instance, so check\n // whether the conflicting entity has the same entityRef but a different locationKey\n const conflictingKey = await checkLocationKeyConflict({\n tx,\n entityRef,\n locationKey,\n });\n if (conflictingKey) {\n this.options.logger.warn(\n `Detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`,\n );\n if (this.options.eventBroker && locationKey) {\n const eventParams: EventParams<CatalogConflictEventPayload> = {\n topic: CATALOG_CONFLICTS_TOPIC,\n eventPayload: {\n unprocessedEntity: entity,\n entityRef,\n newLocationKey: locationKey,\n existingLocationKey: conflictingKey,\n lastConflictAt: DateTime.now().toISO()!,\n },\n };\n await this.options.eventBroker?.publish(eventParams);\n }\n }\n }\n\n // Lastly, replace refresh state references for the originating entity and any successfully added entities\n await 
tx<DbRefreshStateReferencesRow>('refresh_state_references')\n // Remove all existing references from the originating entity\n .where({ source_entity_ref: options.sourceEntityRef })\n // And remove any existing references to entities that we're inserting new references for\n .orWhereIn('target_entity_ref', stateReferences)\n .delete();\n await tx.batchInsert(\n 'refresh_state_references',\n stateReferences.map(entityRef => ({\n source_entity_ref: options.sourceEntityRef,\n target_entity_ref: entityRef,\n })),\n BATCH_SIZE,\n );\n }\n}\n"],"names":["initDatabaseMetrics","errors","ConflictError","stringifyEntityRef","generateTargetKey","timestampToDateTime","rethrowError","lodash","generateStableHash","updateUnprocessedEntity","insertUnprocessedEntity","checkLocationKeyConflict","CATALOG_CONFLICTS_TOPIC","DateTime"],"mappings":";;;;;;;;;;;;;;;;;;AA0DA,MAAM,UAAa,GAAA,EAAA;AAEZ,MAAM,yBAAwD,CAAA;AAAA,EACnE,YACmB,OAMjB,EAAA;AANiB,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA;AAOjB,IAAAA,2BAAA,CAAoB,QAAQ,QAAQ,CAAA;AAAA;AACtC,EAEA,MAAM,qBACJ,CAAA,QAAA,EACA,OACwD,EAAA;AACxD,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA;AAAA,MACJ,EAAA;AAAA,MACA,eAAA;AAAA,MACA,UAAA;AAAA,cACAC,QAAA;AAAA,MACA,SAAA;AAAA,MACA,gBAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACE,GAAA,OAAA;AACJ,IAAM,MAAA,YAAA,GAAe,EAAG,CAAA,MAAA,CAAO,MAAO,CAAA,MAAA;AACtC,IAAA,MAAM,aAAgB,GAAA,MAAM,EAAsB,CAAA,eAAe,EAC9D,MAAO,CAAA;AAAA,MACN,gBAAA,EAAkB,IAAK,CAAA,SAAA,CAAU,eAAe,CAAA;AAAA,MAChD,WAAa,EAAA,UAAA;AAAA,cACbA,QAAA;AAAA,MACA,YAAc,EAAA;AAAA,KACf,CACA,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA,CACrB,SAAS,CAAS,KAAA,KAAA;AACjB,MAAA,IAAI,CAAC,WAAa,EAAA;AAChB,QAAO,OAAA,KAAA,CAAM,UAAU,cAAc,CAAA;AAAA;AAEvC,MAAA,OAAO,MACJ,KAAM,CAAA,cAAA,EAAgB,WAAW,CAAA,CACjC,YAAY,cAAc,CAAA;AAAA,KAC9B,CAAA;AACH,IAAA,IAAI,kBAAkB,CAAG,EAAA;AACvB,MAAA,MAAM,IAAIC,oBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,EAAE,CAAA,oBAAA,EAAuB,WAAW,CAAA,CAAA;AAAA,OACpF;AAAA;AAEF,IAAM,MAAA,eAAA,GAAkBC,gCAAmB,eAAe,CAAA;AAG1D,IAAM,MAAA,IAAA,CAAK,uBAAuB,EAAI,EAAA;AAAA,MACpC,QAAU,EAAA,gBAAA;AAAA,MACV;AAAA,KACD,CAAA;AAID,IAAI,IAAA,oBAAA;AACJ,IAAA,IAAI,aAAa,QAAS,CAAA,SAAS,KAAK,YAAa,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtE,MAAuB,oBAAA,GAAA,MAAM,EAAmB,CAAA,WAAW,CACxD,CAAA,MAAA,CAAO,GAAG,CAAA,CACV,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA;AACtC,MAAM,MAAA,EAAA,CAAmB,WAAW,CACjC,CAAA,KAAA,CAAM,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA;AAAA,KACL,MAAA;AACL,MAAA,oBAAA,GAAuB,MAAM,EAAA,CAAmB,WAAW,CAAA,CACxD,KAAM,CAAA,EAAE,qBAAuB,EAAA,EAAA,EAAI,CAAA,CACnC,MAAO,EAAA,CACP,UAAU,GAAG,CAAA;AAAA;AAIlB,IAAA,MAAM,eAAiC,SAAU,CAAA,GAAA;AAAA,MAC/C,CAAC,EAAE,MAAQ,EAAA,MAAA,EAAQ,MAAY,MAAA;AAAA,QAC7B,qBAAuB,EAAA,EAAA;AAAA,QACvB,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C,iBAAA,EAAmBA,gCAAmB,MAAM,CAAA;AAAA,QAC5C;AAAA,OACF;AAAA,KACF;AAEA,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,WAAA;AAAA,MACA,IAAA,CAAK,qBAAqB,YAAY,CAAA;AAAA,MACtC;AAAA,KACF;AAGA,IAAM,MAAA,EAAA,CAAqB,cAAc,CACtC,CAAA,KAAA,CAAM,EAAE,SAAW,EAAA,EAAA,EAAI,CAAA,CACvB,MAAO,EAAA;AAGV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,cAAA;AAAA,MACA,WAAA,CAAY,IAAI,CAAM,CAAA,MAAA;AAAA,QACpB,SAAW,EAAA,EAAA;AAAA,QACX,GAAA,EAAKC,sBAAkB,CAAA,CAAA,CAAE,GAAG;AAAA,OAC5B,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAEA,IAAO,OAAA;AAAA,MACL,QAAU,EAAA;AAAA,QACR,SAAW,EAAA;AAAA;AACb,KACF;AAAA;AACF,EAEA,MAAM,2BACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAA,MAAM,EAAE,EAAA,EAAI,MAAQ,EAAA,UAAA,EAAe,GAAA,OAAA;AAEnC,IAAM,MAAA,EAAA,CAAsB,eAAe,CAAA,CACxC,MAAO,CAAA;AAAA,MACN,MAAA;AAAA,MACA,WAAa,EAAA;AAAA,KACd,CAAA,CACA,KAAM,CAAA,WAAA,EAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,iBACJ,CAAA,QAAA,EACA,OACe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AACX,IAAM,MAAA,EAAE,EAAI,EA
AA,KAAA,EAAU,GAAA,OAAA;AAEtB,IAAA,MAAM,GAAsB,eAAe,CAAA,CACxC,MAAO,CAAA,EAAE,OAAO,IAAK,CAAA,SAAA,CAAU,KAAS,IAAA,EAAE,CAAE,EAAC,CAC7C,CAAA,KAAA,CAAM,aAAa,EAAE,CAAA;AAAA;AAC1B,EAEA,MAAM,sBACJ,CAAA,OAAA,EACA,OACuC,EAAA;AACvC,IAAA,MAAM,IAAO,GAAA,OAAA;AAEb,IAAA,IAAI,UAAa,GAAA,IAAA,CAAwB,eAAe,CAAA,CAAE,MAAO,CAAA;AAAA,MAC/D,WAAA;AAAA,MACA,YAAA;AAAA,MACA,oBAAA;AAAA,MACA,aAAA;AAAA,MACA,OAAA;AAAA,MACA,QAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACD,CAAA;AAKD,IAAI,IAAA,CAAC,OAAS,EAAA,QAAA,EAAU,IAAI,CAAA,CAAE,SAAS,IAAK,CAAA,MAAA,CAAO,MAAO,CAAA,MAAM,CAAG,EAAA;AACjE,MAAa,UAAA,GAAA,UAAA,CAAW,SAAU,EAAA,CAAE,UAAW,EAAA;AAAA;AAGjD,IAAA,MAAM,QAAQ,MAAM,UAAA,CACjB,KAAM,CAAA,gBAAA,EAAkB,MAAM,IAAK,CAAA,EAAA,CAAG,GAAI,EAAC,EAC3C,KAAM,CAAA,OAAA,CAAQ,gBAAgB,CAC9B,CAAA,OAAA,CAAQ,kBAAkB,KAAK,CAAA;AAElC,IAAM,MAAA,QAAA,GAAW,IAAK,CAAA,OAAA,CAAQ,eAAgB,EAAA;AAE9C,IAAM,MAAA,YAAA,GAAe,CAAC,eAA4B,KAAA;AAChD,MAAA,IAAI,KAAK,MAAO,CAAA,MAAA,CAAO,MAAO,CAAA,QAAA,CAAS,SAAS,CAAG,EAAA;AACjD,QAAA,OAAO,KAAK,GAAI,CAAA,CAAA,kBAAA,CAAA,EAAsB,CAAC,CAAG,EAAA,eAAe,UAAU,CAAC,CAAA;AAAA,iBAC3D,IAAK,CAAA,MAAA,CAAO,OAAO,MAAO,CAAA,QAAA,CAAS,OAAO,CAAG,EAAA;AACtD,QAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAoB,iBAAA,EAAA,eAAe,CAAS,OAAA,CAAA,CAAA;AAAA;AAE9D,MAAA,OAAO,IAAK,CAAA,GAAA,CAAI,CAAqB,kBAAA,EAAA,eAAe,CAAW,SAAA,CAAA,CAAA;AAAA,KACjE;AAEA,IAAM,MAAA,IAAA,CAAwB,eAAe,CAC1C,CAAA,OAAA;AAAA,MACC,YAAA;AAAA,MACA,KAAM,CAAA,GAAA,CAAI,CAAK,CAAA,KAAA,CAAA,CAAE,UAAU;AAAA,MAE5B,MAAO,CAAA;AAAA,MACN,cAAA,EAAgB,aAAa,QAAQ;AAAA,KACtC,CAAA;AAEH,IAAO,OAAA;AAAA,MACL,OAAO,KAAM,CAAA,GAAA;AAAA,QACX,CACG,CAAA,MAAA;AAAA,UACC,IAAI,CAAE,CAAA,SAAA;AAAA,UACN,WAAW,CAAE,CAAA,UAAA;AAAA,UACb,iBAAmB,EAAA,IAAA,CAAK,KAAM,CAAA,CAAA,CAAE,kBAAkB,CAAA;AAAA,UAClD,UAAA,EAAY,EAAE,WAAe,IAAA,EAAA;AAAA,UAC7B,YAAA,EAAcC,8BAAoB,CAAA,CAAA,CAAE,cAAc,CAAA;AAAA,UAClD,OAAO,CAAE,CAAA,KAAA,GAAQ,KAAK,KAAM,CAAA,CAAA,CAAE,KAAK,CAAI,GAAA,KAAA,CAAA;AAAA,UACvC,QAAQ,CAAE,CAAA,MAAA;AAAA,UACV,aAAa,CAAE,CAAA;AAAA,SACjB;AAAA;AACJ,KACF;AAAA;AACF,EAEA,MAAM,WACJ,CAAA,QAAA,EACA,OAC4B,EAAA;AAC5B,IAAA,MAAM,EAAK,GAAA,QAAA;AAEX,IAAA,MAAM,OAAO,MAAM,EAAA;AAAA,MACjB;AAAA,MAEC,OAAQ,CAAA,mBAAA,EAAqB,OAAQ,CAAA,UAAU,EAC/C,MAAO,EAAA;AAEV,IAAM,MAAA,UAAA,GAAa,KAAK,GAAI,CAAA,CAAA,CAAA,KAAK,EAAE,iBAAkB,CAAA,CAAE,OAAO,OAAO,CAAA;AAErE,IAAA,OAAO,EAAE,UAAW,EAAA;AAAA;AACtB,EAEA,MAAM,YAAe,EAAiD,EAAA;AACpE,IAAI,IAAA;AACF,MAAA,IAAI,MAAwB,GAAA,KAAA,CAAA;AAE5B,MAAM,MAAA,IAAA,CAAK,QAAQ,QAAS,CAAA,WAAA;AAAA,QAC1B,OAAM,EAAM,KAAA;AAGV,UAAS,MAAA,GAAA,MAAM,GAAG,EAAE,CAAA;AAAA,SACtB;AAAA,QACA;AAAA;AAAA,UAEE,qBAAuB,EAAA;AAAA;AACzB,OACF;AAEA,MAAO,OAAA,MAAA;AAAA,aACA,CAAG,EAAA;AACV,MAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,CAAO,KAAM,CAAA,CAAA,0BAAA,EAA6B,CAAC,CAAE,CAAA,CAAA;AAC1D,MAAA,MAAMC,wBAAa,CAAC,CAAA;AAAA;AACtB;AACF,EAEQ,qBAAqB,IAA0C,EAAA;AACrE,IAAA,OAAOC,uBAAO,CAAA,MAAA;AAAA,MACZ,IAAA;AAAA,MACA,CAAA,CAAA,KAAK,GAAG,CAAE,CAAA,iBAAiB,IAAI,CAAE,CAAA,iBAAiB,CAAI,CAAA,EAAA,CAAA,CAAE,IAAI,CAAA;AAAA,KAC9D;AAAA;AACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBACZ,CAAA,QAAA,EACA,OAIe,EAAA;AACf,IAAA,MAAM,EAAK,GAAA,QAAA;AAGX,IAAM,MAAA,eAAA,GAAkB,IAAI,KAAc,EAAA;AAI1C,IAAA,KAAA,MAAW,EAAE,MAAA,EAAQ,WAAY,EAAA,IAAK,QAAQ,QAAU,EAAA;AACtD,MAAM,MAAA,SAAA,GAAYJ,gCAAmB,MAAM,CAAA;AAC3C,MAAM,MAAA,IAAA,GAAOK,wBAAmB,MAAM,CAAA;AAEtC,MAAM,MAAA,OAAA,GAAU,MAAMC,+CAAwB,CAAA;AAAA,QAC5C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,OAAS,EAAA;AACX,QAAA,eAAA,CAAgB,KAAK,SAAS,CAAA;AAC9B,QAAA;AAAA;AAGF,MAAM,MAAA,QAAA,GAAW,MAAMC,+CAAwB,CAAA;AAAA,QAC7C,EAAA;AAAA,QACA,MAAA;AAAA,QACA,IAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA,EAAQ,KAAK,OAAQ,CAAA;AAAA,OACtB,CAAA;AACD,MAAA,IAAI,QAAU,EAAA;AACZ,QAAA,eAAA,CAAgB,KAAK,S
AAS,CAAA;AAC9B,QAAA;AAAA;AAMF,MAAM,MAAA,cAAA,GAAiB,MAAMC,iDAAyB,CAAA;AAAA,QACpD,EAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AACD,MAAA,IAAI,cAAgB,EAAA;AAClB,QAAA,IAAA,CAAK,QAAQ,MAAO,CAAA,IAAA;AAAA,UAClB,CAAkC,+BAAA,EAAA,SAAS,CAA0B,uBAAA,EAAA,cAAc,iBAAiB,WAAW,CAAA;AAAA,SACjH;AACA,QAAI,IAAA,IAAA,CAAK,OAAQ,CAAA,WAAA,IAAe,WAAa,EAAA;AAC3C,UAAA,MAAM,WAAwD,GAAA;AAAA,YAC5D,KAAO,EAAAC,iCAAA;AAAA,YACP,YAAc,EAAA;AAAA,cACZ,iBAAmB,EAAA,MAAA;AAAA,cACnB,SAAA;AAAA,cACA,cAAgB,EAAA,WAAA;AAAA,cAChB,mBAAqB,EAAA,cAAA;AAAA,cACrB,cAAgB,EAAAC,cAAA,CAAS,GAAI,EAAA,CAAE,KAAM;AAAA;AACvC,WACF;AACA,UAAA,MAAM,IAAK,CAAA,OAAA,CAAQ,WAAa,EAAA,OAAA,CAAQ,WAAW,CAAA;AAAA;AACrD;AACF;AAIF,IAAA,MAAM,EAAgC,CAAA,0BAA0B,CAE7D,CAAA,KAAA,CAAM,EAAE,iBAAmB,EAAA,OAAA,CAAQ,eAAgB,EAAC,CAEpD,CAAA,SAAA,CAAU,mBAAqB,EAAA,eAAe,EAC9C,MAAO,EAAA;AACV,IAAA,MAAM,EAAG,CAAA,WAAA;AAAA,MACP,0BAAA;AAAA,MACA,eAAA,CAAgB,IAAI,CAAc,SAAA,MAAA;AAAA,QAChC,mBAAmB,OAAQ,CAAA,eAAA;AAAA,QAC3B,iBAAmB,EAAA;AAAA,OACnB,CAAA,CAAA;AAAA,MACF;AAAA,KACF;AAAA;AAEJ;;;;"}
package/dist/index.cjs.js
CHANGED
@@ -8,16 +8,10 @@ var AnnotateScmSlugEntityProcessor = require('./processors/AnnotateScmSlugEntity
 var BuiltinKindsEntityProcessor = require('./processors/BuiltinKindsEntityProcessor.cjs.js');
 var CodeOwnersProcessor = require('./processors/CodeOwnersProcessor.cjs.js');
 var FileReaderProcessor = require('./processors/FileReaderProcessor.cjs.js');
-var LocationEntityProcessor = require('./processors/LocationEntityProcessor.cjs.js');
 var PlaceholderProcessor = require('./processors/PlaceholderProcessor.cjs.js');
 var UrlReaderProcessor = require('./processors/UrlReaderProcessor.cjs.js');
 var transformLegacyPolicyToProcessor = require('./processors/transformLegacyPolicyToProcessor.cjs.js');
-var refresh = require('./processing/refresh.cjs.js');
-var DefaultCatalogCollator = require('./search/DefaultCatalogCollator.cjs.js');
-var CatalogBuilder = require('./service/CatalogBuilder.cjs.js');
-var deprecated = require('./deprecated.cjs.js');
 var constants = require('./constants.cjs.js');
-var parse = require('./util/parse.cjs.js');
 
 
 
@@ -27,19 +21,9 @@ exports.AnnotateScmSlugEntityProcessor = AnnotateScmSlugEntityProcessor.Annotate
 exports.BuiltinKindsEntityProcessor = BuiltinKindsEntityProcessor.BuiltinKindsEntityProcessor;
 exports.CodeOwnersProcessor = CodeOwnersProcessor.CodeOwnersProcessor;
 exports.FileReaderProcessor = FileReaderProcessor.FileReaderProcessor;
-exports.LocationEntityProcessor = LocationEntityProcessor.LocationEntityProcessor;
 exports.PlaceholderProcessor = PlaceholderProcessor.PlaceholderProcessor;
 exports.UrlReaderProcessor = UrlReaderProcessor.UrlReaderProcessor;
 exports.transformLegacyPolicyToProcessor = transformLegacyPolicyToProcessor.transformLegacyPolicyToProcessor;
-exports.createRandomProcessingInterval = refresh.createRandomProcessingInterval;
-exports.DefaultCatalogCollator = DefaultCatalogCollator.DefaultCatalogCollator;
-exports.CatalogBuilder = CatalogBuilder.CatalogBuilder;
-exports.DefaultCatalogCollatorFactory = deprecated.DefaultCatalogCollatorFactory;
-exports.defaultCatalogCollatorEntityTransformer = deprecated.defaultCatalogCollatorEntityTransformer;
-exports.locationSpecToLocationEntity = deprecated.locationSpecToLocationEntity;
-exports.locationSpecToMetadataName = deprecated.locationSpecToMetadataName;
-exports.processingResult = deprecated.processingResult;
 exports.CATALOG_CONFLICTS_TOPIC = constants.CATALOG_CONFLICTS_TOPIC;
 exports.CATALOG_ERRORS_TOPIC = constants.CATALOG_ERRORS_TOPIC;
-exports.parseEntityYaml = parse.parseEntityYaml;
 //# sourceMappingURL=index.cjs.js.map

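The exports dropped from the package root here are the ones the 2.0.0-next.1 changelog entry redirects to other packages. For code that previously imported them from `@backstage/plugin-catalog-backend`, the fix is an import-path change; the names below are taken from the changelog lists above:

```ts
// Before (root exports removed in 2.0.0):
// import {
//   CatalogProcessor,
//   DeferredEntity,
//   processingResult,
//   parseEntityYaml,
//   LocationSpec,
// } from '@backstage/plugin-catalog-backend';

// After:
import {
  type CatalogProcessor,
  type DeferredEntity,
  processingResult,
  parseEntityYaml,
} from '@backstage/plugin-catalog-node';
import { type LocationSpec } from '@backstage/plugin-catalog-common';
```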
package/dist/index.cjs.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":"
+{"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;"}