@backstage/plugin-catalog-backend-module-incremental-ingestion 0.5.5-next.1 → 0.5.5-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/alpha/package.json +1 -1
- package/dist/alpha.cjs.js +3 -129
- package/dist/alpha.cjs.js.map +1 -1
- package/dist/database/IncrementalIngestionDatabaseManager.cjs.js +487 -0
- package/dist/database/IncrementalIngestionDatabaseManager.cjs.js.map +1 -0
- package/dist/database/migrations.cjs.js +18 -0
- package/dist/database/migrations.cjs.js.map +1 -0
- package/dist/database/tables.cjs.js +6 -0
- package/dist/database/tables.cjs.js.map +1 -0
- package/dist/engine/IncrementalIngestionEngine.cjs.js +327 -0
- package/dist/engine/IncrementalIngestionEngine.cjs.js.map +1 -0
- package/dist/index.cjs.js +3 -83
- package/dist/index.cjs.js.map +1 -1
- package/dist/module/WrapperProviders.cjs.js +83 -0
- package/dist/module/WrapperProviders.cjs.js.map +1 -0
- package/dist/module/catalogModuleIncrementalIngestionEntityProvider.cjs.js +56 -0
- package/dist/module/catalogModuleIncrementalIngestionEntityProvider.cjs.js.map +1 -0
- package/dist/router/routes.cjs.js +218 -0
- package/dist/router/routes.cjs.js.map +1 -0
- package/dist/service/IncrementalCatalogBuilder.cjs.js +84 -0
- package/dist/service/IncrementalCatalogBuilder.cjs.js.map +1 -0
- package/dist/util.cjs.js +28 -0
- package/dist/util.cjs.js.map +1 -0
- package/package.json +8 -8
- package/dist/cjs/util-B3Qn9g5Y.cjs.js +0 -1063
- package/dist/cjs/util-B3Qn9g5Y.cjs.js.map +0 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,19 @@
 # @backstage/plugin-catalog-backend-module-incremental-ingestion

+## 0.5.5-next.2
+
+### Patch Changes
+
+- Updated dependencies
+  - @backstage/plugin-catalog-node@1.13.1-next.1
+  - @backstage/plugin-catalog-backend@1.26.2-next.2
+  - @backstage/backend-plugin-api@1.0.1-next.1
+  - @backstage/catalog-model@1.7.0
+  - @backstage/config@1.2.0
+  - @backstage/errors@1.2.4
+  - @backstage/plugin-events-node@0.4.1-next.1
+  - @backstage/plugin-permission-common@0.8.1
+
 ## 0.5.5-next.1

 ### Patch Changes
package/alpha/package.json
CHANGED
package/dist/alpha.cjs.js
CHANGED
@@ -2,136 +2,10 @@

 Object.defineProperty(exports, '__esModule', { value: true });

-var backendPluginApi = require('@backstage/backend-plugin-api');
-var alpha = require('@backstage/plugin-catalog-node/alpha');
-var errors = require('@backstage/errors');
-var luxon = require('luxon');
-var util = require('./cjs/util-B3Qn9g5Y.cjs.js');
-require('perf_hooks');
-require('uuid');
-require('@backstage/catalog-model');
-require('@backstage/backend-common');
-require('express');
-require('express-promise-router');
+var catalogModuleIncrementalIngestionEntityProvider = require('./module/catalogModuleIncrementalIngestionEntityProvider.cjs.js');

-class WrapperProviders {
-  constructor(options) {
-    this.options = options;
-  }
-  migrate;
-  numberOfProvidersToConnect = 0;
-  readySignal = new util.Deferred();
-  wrap(provider, options) {
-    this.numberOfProvidersToConnect += 1;
-    return {
-      getProviderName: () => provider.getProviderName(),
-      connect: async (connection) => {
-        await this.startProvider(provider, options, connection);
-        this.numberOfProvidersToConnect -= 1;
-        if (this.numberOfProvidersToConnect === 0) {
-          this.readySignal.resolve();
-        }
-      }
-    };
-  }
-  async adminRouter() {
-    return await new util.IncrementalProviderRouter(
-      new util.IncrementalIngestionDatabaseManager({ client: this.options.client }),
-      this.options.logger
-    ).createRouter();
-  }
-  async startProvider(provider, providerOptions, connection) {
-    const logger = this.options.logger.child({
-      entityProvider: provider.getProviderName()
-    });
-    try {
-      if (!this.migrate) {
-        this.migrate = Promise.resolve().then(async () => {
-          const apply = this.options.applyDatabaseMigrations ?? util.applyDatabaseMigrations;
-          await apply(this.options.client);
-        });
-      }
-      await this.migrate;
-      const { burstInterval, burstLength, restLength } = providerOptions;
-      logger.info(`Connecting`);
-      const manager = new util.IncrementalIngestionDatabaseManager({
-        client: this.options.client
-      });
-      const engine = new util.IncrementalIngestionEngine({
-        ...providerOptions,
-        ready: this.readySignal,
-        manager,
-        logger,
-        provider,
-        restLength,
-        connection
-      });
-      const frequency = luxon.Duration.isDuration(burstInterval) ? burstInterval : luxon.Duration.fromObject(burstInterval);
-      const length = luxon.Duration.isDuration(burstLength) ? burstLength : luxon.Duration.fromObject(burstLength);
-      await this.options.scheduler.scheduleTask({
-        id: provider.getProviderName(),
-        fn: engine.taskFn.bind(engine),
-        frequency,
-        timeout: length
-      });
-    } catch (error) {
-      logger.warn(
-        `Failed to initialize incremental ingestion provider ${provider.getProviderName()}, ${errors.stringifyError(
-          error
-        )}`
-      );
-      throw error;
-    }
-  }
-}

-const incrementalIngestionProvidersExtensionPoint = backendPluginApi.createExtensionPoint({
-  id: "catalog.incrementalIngestionProvider.providers"
-});
-const catalogModuleIncrementalIngestionEntityProvider = backendPluginApi.createBackendModule({
-  pluginId: "catalog",
-  moduleId: "incremental-ingestion-entity-provider",
-  register(env) {
-    const addedProviders = new Array();
-    env.registerExtensionPoint(incrementalIngestionProvidersExtensionPoint, {
-      addProvider({ options, provider }) {
-        addedProviders.push({ options, provider });
-      }
-    });
-    env.registerInit({
-      deps: {
-        catalog: alpha.catalogProcessingExtensionPoint,
-        config: backendPluginApi.coreServices.rootConfig,
-        database: backendPluginApi.coreServices.database,
-        httpRouter: backendPluginApi.coreServices.httpRouter,
-        logger: backendPluginApi.coreServices.logger,
-        scheduler: backendPluginApi.coreServices.scheduler
-      },
-      async init({
-        catalog,
-        config,
-        database,
-        httpRouter,
-        logger,
-        scheduler
-      }) {
-        const client = await database.getClient();
-        const providers = new WrapperProviders({
-          config,
-          logger,
-          client,
-          scheduler
-        });
-        for (const entry of addedProviders) {
-          const wrapped = providers.wrap(entry.provider, entry.options);
-          catalog.addEntityProvider(wrapped);
-        }
-        httpRouter.use(await providers.adminRouter());
-      }
-    });
-  }
-});

-exports.default = catalogModuleIncrementalIngestionEntityProvider;
-exports.incrementalIngestionProvidersExtensionPoint = incrementalIngestionProvidersExtensionPoint;
+exports.default = catalogModuleIncrementalIngestionEntityProvider.catalogModuleIncrementalIngestionEntityProvider;
+exports.incrementalIngestionProvidersExtensionPoint = catalogModuleIncrementalIngestionEntityProvider.incrementalIngestionProvidersExtensionPoint;
 //# sourceMappingURL=alpha.cjs.js.map
package/dist/alpha.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"alpha.cjs.js","sources":["../src/module/WrapperProviders.ts","../src/module/catalogModuleIncrementalIngestionEntityProvider.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n RootConfigService,\n SchedulerService,\n} from '@backstage/backend-plugin-api';\nimport { stringifyError } from '@backstage/errors';\nimport {\n EntityProvider,\n EntityProviderConnection,\n} from '@backstage/plugin-catalog-node';\nimport express from 'express';\nimport { Knex } from 'knex';\nimport { Duration } from 'luxon';\nimport { IncrementalIngestionDatabaseManager } from '../database/IncrementalIngestionDatabaseManager';\nimport { applyDatabaseMigrations } from '../database/migrations';\nimport { IncrementalIngestionEngine } from '../engine/IncrementalIngestionEngine';\nimport { IncrementalProviderRouter } from '../router/routes';\nimport {\n IncrementalEntityProvider,\n IncrementalEntityProviderOptions,\n} from '../types';\nimport { Deferred } from '../util';\n\n/**\n * Helps in the creation of the catalog entity providers that wrap the\n * incremental ones.\n */\nexport class WrapperProviders {\n private migrate: Promise<void> | undefined;\n private numberOfProvidersToConnect = 0;\n private readonly readySignal = new Deferred<void>();\n\n constructor(\n private readonly options: {\n config: RootConfigService;\n logger: LoggerService;\n client: Knex;\n scheduler: SchedulerService;\n applyDatabaseMigrations?: typeof applyDatabaseMigrations;\n },\n ) {}\n\n wrap(\n provider: IncrementalEntityProvider<unknown, unknown>,\n options: IncrementalEntityProviderOptions,\n ): EntityProvider {\n this.numberOfProvidersToConnect += 1;\n return {\n getProviderName: () => provider.getProviderName(),\n connect: async connection => {\n await this.startProvider(provider, options, connection);\n this.numberOfProvidersToConnect -= 1;\n if (this.numberOfProvidersToConnect === 0) {\n this.readySignal.resolve();\n }\n },\n };\n }\n\n async adminRouter(): Promise<express.Router> {\n return await new IncrementalProviderRouter(\n new IncrementalIngestionDatabaseManager({ client: this.options.client }),\n this.options.logger,\n ).createRouter();\n }\n\n private async startProvider(\n provider: IncrementalEntityProvider<unknown, unknown>,\n providerOptions: IncrementalEntityProviderOptions,\n connection: EntityProviderConnection,\n ) {\n const logger = this.options.logger.child({\n entityProvider: provider.getProviderName(),\n });\n\n try {\n if (!this.migrate) {\n this.migrate = Promise.resolve().then(async () => {\n const apply =\n this.options.applyDatabaseMigrations ?? 
applyDatabaseMigrations;\n await apply(this.options.client);\n });\n }\n\n await this.migrate;\n\n const { burstInterval, burstLength, restLength } = providerOptions;\n\n logger.info(`Connecting`);\n\n const manager = new IncrementalIngestionDatabaseManager({\n client: this.options.client,\n });\n const engine = new IncrementalIngestionEngine({\n ...providerOptions,\n ready: this.readySignal,\n manager,\n logger,\n provider,\n restLength,\n connection,\n });\n\n const frequency = Duration.isDuration(burstInterval)\n ? burstInterval\n : Duration.fromObject(burstInterval);\n const length = Duration.isDuration(burstLength)\n ? burstLength\n : Duration.fromObject(burstLength);\n\n await this.options.scheduler.scheduleTask({\n id: provider.getProviderName(),\n fn: engine.taskFn.bind(engine),\n frequency,\n timeout: length,\n });\n } catch (error) {\n logger.warn(\n `Failed to initialize incremental ingestion provider ${provider.getProviderName()}, ${stringifyError(\n error,\n )}`,\n );\n throw error;\n }\n }\n}\n","/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n coreServices,\n createBackendModule,\n createExtensionPoint,\n} from '@backstage/backend-plugin-api';\nimport { catalogProcessingExtensionPoint } from '@backstage/plugin-catalog-node/alpha';\nimport {\n IncrementalEntityProvider,\n IncrementalEntityProviderOptions,\n} from '@backstage/plugin-catalog-backend-module-incremental-ingestion';\nimport { WrapperProviders } from './WrapperProviders';\n\n/**\n * @alpha\n * Interface for {@link incrementalIngestionProvidersExtensionPoint}.\n */\nexport interface IncrementalIngestionProviderExtensionPoint {\n /** Adds a new incremental entity provider */\n addProvider<TCursor, TContext>(config: {\n options: IncrementalEntityProviderOptions;\n provider: IncrementalEntityProvider<TCursor, TContext>;\n }): void;\n}\n\n/**\n * @alpha\n *\n * Extension point for registering incremental ingestion providers.\n * The `catalogModuleIncrementalIngestionEntityProvider` must be installed for these providers to work.\n *\n * @example\n *\n * ```ts\n * backend.add(createBackendModule({\n * pluginId: 'catalog',\n * moduleId: 'my-incremental-provider',\n * register(env) {\n * env.registerInit({\n * deps: {\n * extension: incrementalIngestionProvidersExtensionPoint,\n * },\n * async init({ extension }) {\n * extension.addProvider({\n * burstInterval: ...,\n * burstLength: ...,\n * restLength: ...,\n * }, {\n * next(context, cursor) {\n * ...\n * },\n * ...\n * })\n * })\n * })\n * }\n * }))\n * ```\n */\nexport const incrementalIngestionProvidersExtensionPoint =\n createExtensionPoint<IncrementalIngestionProviderExtensionPoint>({\n id: 'catalog.incrementalIngestionProvider.providers',\n });\n\n/**\n * Registers the incremental entity provider with the catalog processing extension point.\n *\n * @alpha\n */\nexport const catalogModuleIncrementalIngestionEntityProvider =\n createBackendModule({\n pluginId: 'catalog',\n moduleId: 
'incremental-ingestion-entity-provider',\n register(env) {\n const addedProviders = new Array<{\n provider: IncrementalEntityProvider<unknown, unknown>;\n options: IncrementalEntityProviderOptions;\n }>();\n\n env.registerExtensionPoint(incrementalIngestionProvidersExtensionPoint, {\n addProvider({ options, provider }) {\n addedProviders.push({ options, provider });\n },\n });\n\n env.registerInit({\n deps: {\n catalog: catalogProcessingExtensionPoint,\n config: coreServices.rootConfig,\n database: coreServices.database,\n httpRouter: coreServices.httpRouter,\n logger: coreServices.logger,\n scheduler: coreServices.scheduler,\n },\n async init({\n catalog,\n config,\n database,\n httpRouter,\n logger,\n scheduler,\n }) {\n const client = await database.getClient();\n\n const providers = new WrapperProviders({\n config,\n logger,\n client,\n scheduler,\n });\n\n for (const entry of addedProviders) {\n const wrapped = providers.wrap(entry.provider, entry.options);\n catalog.addEntityProvider(wrapped);\n }\n\n httpRouter.use(await providers.adminRouter());\n },\n });\n },\n });\n"],"names":["Deferred","IncrementalProviderRouter","IncrementalIngestionDatabaseManager","applyDatabaseMigrations","IncrementalIngestionEngine","Duration","stringifyError","createExtensionPoint","createBackendModule","catalogProcessingExtensionPoint","coreServices"],"mappings":";;;;;;;;;;;;;;;;AA2CO,MAAM,gBAAiB,CAAA;AAAA,EAK5B,YACmB,OAOjB,EAAA;AAPiB,IAAA,IAAA,CAAA,OAAA,GAAA,OAAA,CAAA;AAAA,GAOhB;AAAA,EAZK,OAAA,CAAA;AAAA,EACA,0BAA6B,GAAA,CAAA,CAAA;AAAA,EACpB,WAAA,GAAc,IAAIA,aAAe,EAAA,CAAA;AAAA,EAYlD,IAAA,CACE,UACA,OACgB,EAAA;AAChB,IAAA,IAAA,CAAK,0BAA8B,IAAA,CAAA,CAAA;AACnC,IAAO,OAAA;AAAA,MACL,eAAA,EAAiB,MAAM,QAAA,CAAS,eAAgB,EAAA;AAAA,MAChD,OAAA,EAAS,OAAM,UAAc,KAAA;AAC3B,QAAA,MAAM,IAAK,CAAA,aAAA,CAAc,QAAU,EAAA,OAAA,EAAS,UAAU,CAAA,CAAA;AACtD,QAAA,IAAA,CAAK,0BAA8B,IAAA,CAAA,CAAA;AACnC,QAAI,IAAA,IAAA,CAAK,+BAA+B,CAAG,EAAA;AACzC,UAAA,IAAA,CAAK,YAAY,OAAQ,EAAA,CAAA;AAAA,SAC3B;AAAA,OACF;AAAA,KACF,CAAA;AAAA,GACF;AAAA,EAEA,MAAM,WAAuC,GAAA;AAC3C,IAAA,OAAO,MAAM,IAAIC,8BAAA;AAAA,MACf,IAAIC,wCAAoC,CAAA,EAAE,QAAQ,IAAK,CAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,MACvE,KAAK,OAAQ,CAAA,MAAA;AAAA,MACb,YAAa,EAAA,CAAA;AAAA,GACjB;AAAA,EAEA,MAAc,aAAA,CACZ,QACA,EAAA,eAAA,EACA,UACA,EAAA;AACA,IAAA,MAAM,MAAS,GAAA,IAAA,CAAK,OAAQ,CAAA,MAAA,CAAO,KAAM,CAAA;AAAA,MACvC,cAAA,EAAgB,SAAS,eAAgB,EAAA;AAAA,KAC1C,CAAA,CAAA;AAED,IAAI,IAAA;AACF,MAAI,IAAA,CAAC,KAAK,OAAS,EAAA;AACjB,QAAA,IAAA,CAAK,OAAU,GAAA,OAAA,CAAQ,OAAQ,EAAA,CAAE,KAAK,YAAY;AAChD,UAAM,MAAA,KAAA,GACJ,IAAK,CAAA,OAAA,CAAQ,uBAA2B,IAAAC,4BAAA,CAAA;AAC1C,UAAM,MAAA,KAAA,CAAM,IAAK,CAAA,OAAA,CAAQ,MAAM,CAAA,CAAA;AAAA,SAChC,CAAA,CAAA;AAAA,OACH;AAEA,MAAA,MAAM,IAAK,CAAA,OAAA,CAAA;AAEX,MAAA,MAAM,EAAE,aAAA,EAAe,WAAa,EAAA,UAAA,EAAe,GAAA,eAAA,CAAA;AAEnD,MAAA,MAAA,CAAO,KAAK,CAAY,UAAA,CAAA,CAAA,CAAA;AAExB,MAAM,MAAA,OAAA,GAAU,IAAID,wCAAoC,CAAA;AAAA,QACtD,MAAA,EAAQ,KAAK,OAAQ,CAAA,MAAA;AAAA,OACtB,CAAA,CAAA;AACD,MAAM,MAAA,MAAA,GAAS,IAAIE,+BAA2B,CAAA;AAAA,QAC5C,GAAG,eAAA;AAAA,QACH,OAAO,IAAK,CAAA,WAAA;AAAA,QACZ,OAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,UAAA;AAAA,QACA,UAAA;AAAA,OACD,CAAA,CAAA;AAED,MAAM,MAAA,SAAA,GAAYC,eAAS,UAAW,CAAA,aAAa,IAC/C,aACA,GAAAA,cAAA,CAAS,WAAW,aAAa,CAAA,CAAA;AACrC,MAAM,MAAA,MAAA,GAASA,eAAS,UAAW,CAAA,WAAW,IAC1C,WACA,GAAAA,cAAA,CAAS,WAAW,WAAW,CAAA,CAAA;AAEnC,MAAM,MAAA,IAAA,CAAK,OAAQ,CAAA,SAAA,CAAU,YAAa,CAAA;AAAA,QACxC,EAAA,EAAI,SAAS,eAAgB,EAAA;AAAA,QAC7B,EAAI,EAAA,MAAA,CAAO,MAAO,CAAA,IAAA,CAAK,MAAM,CAAA;AAAA,QAC7B,SAAA;AAAA,QACA,OAAS,EAAA,MAAA;AAAA,OACV,CAAA,CAAA;AAAA,aACM,KAAO,EAAA;AACd,MAAO,MAAA,CAAA,IAAA;AAA
A,QACL,CAAuD,oDAAA,EAAA,QAAA,CAAS,eAAgB,EAAC,CAAK,EAAA,EAAAC,qBAAA;AAAA,UACpF,KAAA;AAAA,SACD,CAAA,CAAA;AAAA,OACH,CAAA;AACA,MAAM,MAAA,KAAA,CAAA;AAAA,KACR;AAAA,GACF;AACF;;ACnEO,MAAM,8CACXC,qCAAiE,CAAA;AAAA,EAC/D,EAAI,EAAA,gDAAA;AACN,CAAC,EAAA;AAOI,MAAM,kDACXC,oCAAoB,CAAA;AAAA,EAClB,QAAU,EAAA,SAAA;AAAA,EACV,QAAU,EAAA,uCAAA;AAAA,EACV,SAAS,GAAK,EAAA;AACZ,IAAM,MAAA,cAAA,GAAiB,IAAI,KAGxB,EAAA,CAAA;AAEH,IAAA,GAAA,CAAI,uBAAuB,2CAA6C,EAAA;AAAA,MACtE,WAAY,CAAA,EAAE,OAAS,EAAA,QAAA,EAAY,EAAA;AACjC,QAAA,cAAA,CAAe,IAAK,CAAA,EAAE,OAAS,EAAA,QAAA,EAAU,CAAA,CAAA;AAAA,OAC3C;AAAA,KACD,CAAA,CAAA;AAED,IAAA,GAAA,CAAI,YAAa,CAAA;AAAA,MACf,IAAM,EAAA;AAAA,QACJ,OAAS,EAAAC,qCAAA;AAAA,QACT,QAAQC,6BAAa,CAAA,UAAA;AAAA,QACrB,UAAUA,6BAAa,CAAA,QAAA;AAAA,QACvB,YAAYA,6BAAa,CAAA,UAAA;AAAA,QACzB,QAAQA,6BAAa,CAAA,MAAA;AAAA,QACrB,WAAWA,6BAAa,CAAA,SAAA;AAAA,OAC1B;AAAA,MACA,MAAM,IAAK,CAAA;AAAA,QACT,OAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,UAAA;AAAA,QACA,MAAA;AAAA,QACA,SAAA;AAAA,OACC,EAAA;AACD,QAAM,MAAA,MAAA,GAAS,MAAM,QAAA,CAAS,SAAU,EAAA,CAAA;AAExC,QAAM,MAAA,SAAA,GAAY,IAAI,gBAAiB,CAAA;AAAA,UACrC,MAAA;AAAA,UACA,MAAA;AAAA,UACA,MAAA;AAAA,UACA,SAAA;AAAA,SACD,CAAA,CAAA;AAED,QAAA,KAAA,MAAW,SAAS,cAAgB,EAAA;AAClC,UAAA,MAAM,UAAU,SAAU,CAAA,IAAA,CAAK,KAAM,CAAA,QAAA,EAAU,MAAM,OAAO,CAAA,CAAA;AAC5D,UAAA,OAAA,CAAQ,kBAAkB,OAAO,CAAA,CAAA;AAAA,SACnC;AAEA,QAAA,UAAA,CAAW,GAAI,CAAA,MAAM,SAAU,CAAA,WAAA,EAAa,CAAA,CAAA;AAAA,OAC9C;AAAA,KACD,CAAA,CAAA;AAAA,GACH;AACF,CAAC;;;;;"}
+
{"version":3,"file":"alpha.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;"}
package/dist/database/IncrementalIngestionDatabaseManager.cjs.js
ADDED
@@ -0,0 +1,487 @@
+'use strict';
+
+var catalogModel = require('@backstage/catalog-model');
+var uuid = require('uuid');
+
+class IncrementalIngestionDatabaseManager {
+  client;
+  constructor(options) {
+    this.client = options.client;
+  }
+  /**
+   * Performs an update to the ingestion record with matching `id`.
+   * @param options - IngestionRecordUpdate
+   */
+  async updateIngestionRecordById(options) {
+    await this.client.transaction(async (tx) => {
+      const { ingestionId, update } = options;
+      await tx("ingestions").where("id", ingestionId).update(update);
+    });
+  }
+  /**
+   * Performs an update to the ingestion record with matching provider name. Will only update active records.
+   * @param provider - string
+   * @param update - Partial<IngestionUpsertIFace>
+   */
+  async updateIngestionRecordByProvider(provider, update) {
+    await this.client.transaction(async (tx) => {
+      await tx("ingestions").where("provider_name", provider).andWhere("completion_ticket", "open").update(update);
+    });
+  }
+  /**
+   * Performs an insert into the `ingestions` table with the supplied values.
+   * @param record - IngestionUpsertIFace
+   */
+  async insertIngestionRecord(record) {
+    await this.client.transaction(async (tx) => {
+      await tx("ingestions").insert(record);
+    });
+  }
+  async deleteMarkEntities(tx, ids) {
+    const chunks = [];
+    for (let i = 0; i < ids.length; i += 100) {
+      const chunk = ids.slice(i, i + 100);
+      chunks.push(chunk);
+    }
+    let deleted = 0;
+    for (const chunk of chunks) {
+      const chunkDeleted = await tx("ingestion_mark_entities").delete().whereIn(
+        "id",
+        chunk.map((entry) => entry.id)
+      );
+      deleted += chunkDeleted;
+    }
+    return deleted;
+  }
+  /**
+   * Finds the current ingestion record for the named provider.
+   * @param provider - string
+   * @returns IngestionRecord | undefined
+   */
+  async getCurrentIngestionRecord(provider) {
+    return await this.client.transaction(async (tx) => {
+      const record = await tx("ingestions").where("provider_name", provider).andWhere("completion_ticket", "open").first();
+      return record;
+    });
+  }
+  /**
+   * Finds the last ingestion record for the named provider.
+   * @param provider - string
+   * @returns IngestionRecord | undefined
+   */
+  async getPreviousIngestionRecord(provider) {
+    return await this.client.transaction(async (tx) => {
+      return await tx("ingestions").where("provider_name", provider).andWhereNot("completion_ticket", "open").first();
+    });
+  }
+  /**
+   * Removes all entries from `ingestion_marks_entities`, `ingestion_marks`, and `ingestions`
+   * for prior ingestions that completed (i.e., have a `completion_ticket` value other than 'open').
+   * @param provider - string
+   * @returns A count of deletions for each record type.
+   */
+  async clearFinishedIngestions(provider) {
+    return await this.client.transaction(async (tx) => {
+      const markEntitiesDeleted = await tx("ingestion_mark_entities").delete().whereIn(
+        "ingestion_mark_id",
+        tx("ingestion_marks").select("id").whereIn(
+          "ingestion_id",
+          tx("ingestions").select("id").where("provider_name", provider).andWhereNot("completion_ticket", "open")
+        )
+      );
+      const marksDeleted = await tx("ingestion_marks").delete().whereIn(
+        "ingestion_id",
+        tx("ingestions").select("id").where("provider_name", provider).andWhereNot("completion_ticket", "open")
+      );
+      const ingestionsDeleted = await tx("ingestions").delete().where("provider_name", provider).andWhereNot("completion_ticket", "open");
+      return {
+        deletions: {
+          markEntitiesDeleted,
+          marksDeleted,
+          ingestionsDeleted
+        }
+      };
+    });
+  }
+  /**
+   * Automatically cleans up duplicate ingestion records if they were accidentally created.
+   * Any ingestion record where the `rest_completed_at` is null (meaning it is active) AND
+   * the ingestionId is incorrect is a duplicate ingestion record.
+   * @param ingestionId - string
+   * @param provider - string
+   */
+  async clearDuplicateIngestions(ingestionId, provider) {
+    await this.client.transaction(async (tx) => {
+      const invalid = await tx("ingestions").where("provider_name", provider).andWhere("rest_completed_at", null).andWhereNot("id", ingestionId);
+      if (invalid.length > 0) {
+        await tx("ingestions").delete().whereIn("id", invalid);
+        await tx("ingestion_mark_entities").delete().whereIn(
+          "ingestion_mark_id",
+          tx("ingestion_marks").select("id").whereIn("ingestion_id", invalid)
+        );
+        await tx("ingestion_marks").delete().whereIn("ingestion_id", invalid);
+      }
+    });
+  }
+  /**
+   * This method fully purges and resets all ingestion records for the named provider, and
+   * leaves it in a paused state.
+   * @param provider - string
+   * @returns Counts of all deleted ingestion records
+   */
+  async purgeAndResetProvider(provider) {
+    return await this.client.transaction(async (tx) => {
+      const ingestionIDs = await tx("ingestions").select("id").where("provider_name", provider);
+      const markIDs = ingestionIDs.length > 0 ? await tx("ingestion_marks").select("id").whereIn(
+        "ingestion_id",
+        ingestionIDs.map((entry) => entry.id)
+      ) : [];
+      const markEntityIDs = markIDs.length > 0 ? await tx("ingestion_mark_entities").select("id").whereIn(
+        "ingestion_mark_id",
+        markIDs.map((entry) => entry.id)
+      ) : [];
+      const markEntitiesDeleted = await this.deleteMarkEntities(
+        tx,
+        markEntityIDs
+      );
+      const marksDeleted = markIDs.length > 0 ? await tx("ingestion_marks").delete().whereIn(
+        "ingestion_id",
+        ingestionIDs.map((entry) => entry.id)
+      ) : 0;
+      const ingestionsDeleted = await tx("ingestions").delete().where("provider_name", provider);
+      const next_action_at = /* @__PURE__ */ new Date();
+      next_action_at.setTime(next_action_at.getTime() + 24 * 60 * 60 * 1e3);
+      await this.insertIngestionRecord({
+        id: uuid.v4(),
+        next_action: "rest",
+        provider_name: provider,
+        next_action_at,
+        ingestion_completed_at: /* @__PURE__ */ new Date(),
+        status: "resting",
+        completion_ticket: "open"
+      });
+      return { provider, ingestionsDeleted, marksDeleted, markEntitiesDeleted };
+    });
+  }
+  /**
+   * This method is used to remove entity records from the ingestion_mark_entities
+   * table by their entity reference.
+   */
+  async deleteEntityRecordsByRef(entities) {
+    const refs = entities.map((e) => e.entityRef);
+    await this.client.transaction(async (tx) => {
+      await tx("ingestion_mark_entities").delete().whereIn("ref", refs);
+    });
+  }
+  /**
+   * Creates a new ingestion record.
+   * @param provider - string
+   * @returns A new ingestion record
+   */
+  async createProviderIngestionRecord(provider) {
+    const ingestionId = uuid.v4();
+    const nextAction = "ingest";
+    try {
+      await this.insertIngestionRecord({
+        id: ingestionId,
+        next_action: nextAction,
+        provider_name: provider,
+        status: "bursting",
+        completion_ticket: "open"
+      });
+      return { ingestionId, nextAction, attempts: 0, nextActionAt: Date.now() };
+    } catch (_e) {
+      return void 0;
+    }
+  }
+  /**
+   * Computes which entities to remove, if any, at the end of a burst.
+   * @param provider - string
+   * @param ingestionId - string
+   * @returns All entities to remove for this burst.
+   */
+  async computeRemoved(provider, ingestionId) {
+    const previousIngestion = await this.getPreviousIngestionRecord(provider);
+    return await this.client.transaction(async (tx) => {
+      const count = await tx("ingestion_mark_entities").count({ total: "ingestion_mark_entities.ref" }).join(
+        "ingestion_marks",
+        "ingestion_marks.id",
+        "ingestion_mark_entities.ingestion_mark_id"
+      ).join("ingestions", "ingestions.id", "ingestion_marks.ingestion_id").where("ingestions.id", ingestionId);
+      const total = count.reduce((acc, cur) => acc + cur.total, 0);
+      const removed = [];
+      if (previousIngestion) {
+        const stale = await tx("ingestion_mark_entities").select("ingestion_mark_entities.ref").join(
+          "ingestion_marks",
+          "ingestion_marks.id",
+          "ingestion_mark_entities.ingestion_mark_id"
+        ).join("ingestions", "ingestions.id", "ingestion_marks.ingestion_id").where("ingestions.id", previousIngestion.id);
+        for (const entityRef of stale) {
+          removed.push({ entityRef: entityRef.ref });
+        }
+      }
+      return { total, removed };
+    });
+  }
+  /**
+   * Performs a lookup of all providers that have duplicate active ingestion records.
+   * @returns An array of all duplicate active ingestions
+   */
+  async healthcheck() {
+    return await this.client.transaction(async (tx) => {
+      const records = await tx(
+        "ingestions"
+      ).distinct("id", "provider_name").where("rest_completed_at", null);
+      return records;
+    });
+  }
+  /**
+   * Skips any wait time for the next action to run.
+   * @param provider - string
+   */
+  async triggerNextProviderAction(provider) {
+    await this.updateIngestionRecordByProvider(provider, {
+      next_action_at: /* @__PURE__ */ new Date()
+    });
+  }
+  /**
+   * Purges the following tables:
+   * * `ingestions`
+   * * `ingestion_marks`
+   * * `ingestion_mark_entities`
+   *
+   * This function leaves the ingestions table with all providers in a paused state.
+   * @returns Results from cleaning up all ingestion tables.
+   */
+  async cleanupProviders() {
+    const providers = await this.listProviders();
+    const ingestionsDeleted = await this.purgeTable("ingestions");
+    const next_action_at = /* @__PURE__ */ new Date();
+    next_action_at.setTime(next_action_at.getTime() + 24 * 60 * 60 * 1e3);
+    for (const provider of providers) {
+      await this.insertIngestionRecord({
+        id: uuid.v4(),
+        next_action: "rest",
+        provider_name: provider,
+        next_action_at,
+        ingestion_completed_at: /* @__PURE__ */ new Date(),
+        status: "resting",
+        completion_ticket: "open"
+      });
+    }
+    const ingestionMarksDeleted = await this.purgeTable("ingestion_marks");
+    const markEntitiesDeleted = await this.purgeTable(
+      "ingestion_mark_entities"
+    );
+    return { ingestionsDeleted, ingestionMarksDeleted, markEntitiesDeleted };
+  }
+  /**
+   * Configures the current ingestion record to ingest a burst.
+   * @param ingestionId - string
+   */
+  async setProviderIngesting(ingestionId) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: { next_action: "ingest" }
+    });
+  }
+  /**
+   * Indicates the provider is currently ingesting a burst.
+   * @param ingestionId - string
+   */
+  async setProviderBursting(ingestionId) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: { status: "bursting" }
+    });
+  }
+  /**
+   * Finalizes the current ingestion record to indicate that the post-ingestion rest period is complete.
+   * @param ingestionId - string
+   */
+  async setProviderComplete(ingestionId) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: {
+        next_action: "nothing (done)",
+        rest_completed_at: /* @__PURE__ */ new Date(),
+        status: "complete",
+        completion_ticket: uuid.v4()
+      }
+    });
+  }
+  /**
+   * Marks ingestion as complete and starts the post-ingestion rest cycle.
+   * @param ingestionId - string
+   * @param restLength - Duration
+   */
+  async setProviderResting(ingestionId, restLength) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: {
+        next_action: "rest",
+        next_action_at: new Date(Date.now() + restLength.as("milliseconds")),
+        ingestion_completed_at: /* @__PURE__ */ new Date(),
+        status: "resting"
+      }
+    });
+  }
+  /**
+   * Marks ingestion as paused after a burst completes.
+   * @param ingestionId - string
+   */
+  async setProviderInterstitial(ingestionId) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: { attempts: 0, status: "interstitial" }
+    });
+  }
+  /**
+   * Starts the cancel process for the current ingestion.
+   * @param ingestionId - string
+   * @param message - string (optional)
+   */
+  async setProviderCanceling(ingestionId, message) {
+    const update = {
+      next_action: "cancel",
+      last_error: message ? message : void 0,
+      next_action_at: /* @__PURE__ */ new Date(),
+      status: "canceling"
+    };
+    await this.updateIngestionRecordById({ ingestionId, update });
+  }
+  /**
+   * Completes the cancel process and triggers a new ingestion.
+   * @param ingestionId - string
+   */
+  async setProviderCanceled(ingestionId) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: {
+        next_action: "nothing (canceled)",
+        rest_completed_at: /* @__PURE__ */ new Date(),
+        status: "complete",
+        completion_ticket: uuid.v4()
+      }
+    });
+  }
+  /**
+   * Configures the current ingestion to wait and retry, due to a data source error.
+   * @param ingestionId - string
+   * @param attempts - number
+   * @param error - Error
+   * @param backoffLength - number
+   */
+  async setProviderBackoff(ingestionId, attempts, error, backoffLength) {
+    await this.updateIngestionRecordById({
+      ingestionId,
+      update: {
+        next_action: "backoff",
+        attempts: attempts + 1,
+        last_error: String(error),
+        next_action_at: new Date(Date.now() + backoffLength),
+        status: "backing off"
+      }
+    });
+  }
+  /**
+   * Returns the last record from `ingestion_marks` for the supplied ingestionId.
+   * @param ingestionId - string
+   * @returns MarkRecord | undefined
+   */
+  async getLastMark(ingestionId) {
+    return await this.client.transaction(async (tx) => {
+      const mark = await tx("ingestion_marks").where("ingestion_id", ingestionId).orderBy("sequence", "desc").first();
+      return this.#decodeMark(this.client, mark);
+    });
+  }
+  /**
+   * Returns the first record from `ingestion_marks` for the supplied ingestionId.
+   * @param ingestionId - string
+   * @returns MarkRecord | undefined
+   */
+  async getFirstMark(ingestionId) {
+    return await this.client.transaction(async (tx) => {
+      const mark = await tx("ingestion_marks").where("ingestion_id", ingestionId).orderBy("sequence", "asc").first();
+      return this.#decodeMark(this.client, mark);
+    });
+  }
+  async getAllMarks(ingestionId) {
+    return await this.client.transaction(async (tx) => {
+      const marks = await tx("ingestion_marks").where("ingestion_id", ingestionId).orderBy("sequence", "desc");
+      return marks.map((m) => this.#decodeMark(this.client, m));
+    });
+  }
+  /**
+   * Performs an insert into the `ingestion_marks` table with the supplied values.
+   * @param options - MarkRecordInsert
+   */
+  async createMark(options) {
+    const { record } = options;
+    await this.client.transaction(async (tx) => {
+      await tx("ingestion_marks").insert(record);
+    });
+  }
+  // Handles the fact that sqlite does not support json columns; they just
+  // persist the stringified data instead
+  #decodeMark(knex, record) {
+    if (record && knex.client.config.client.includes("sqlite3")) {
+      return {
+        ...record,
+        cursor: JSON.parse(record.cursor)
+      };
+    }
+    return record;
+  }
+  /**
+   * Performs an upsert to the `ingestion_mark_entities` table for all deferred entities.
+   * @param markId - string
+   * @param entities - DeferredEntity[]
+   */
+  async createMarkEntities(markId, entities) {
+    const refs = entities.map((e) => catalogModel.stringifyEntityRef(e.entity));
+    await this.client.transaction(async (tx) => {
+      const existingRefsArray = (await tx("ingestion_mark_entities").select("ref").whereIn("ref", refs)).map((e) => e.ref);
+      const existingRefsSet = new Set(existingRefsArray);
+      const newRefs = refs.filter((e) => !existingRefsSet.has(e));
+      await tx("ingestion_mark_entities").update("ingestion_mark_id", markId).whereIn("ref", existingRefsArray);
+      if (newRefs.length > 0) {
+        await tx("ingestion_mark_entities").insert(
+          newRefs.map((ref) => ({
+            id: uuid.v4(),
+            ingestion_mark_id: markId,
+            ref
+          }))
+        );
+      }
+    });
+  }
+  /**
+   * Deletes the entire content of a table, and returns the number of records deleted.
+   * @param table - string
+   * @returns number
+   */
+  async purgeTable(table) {
+    return await this.client.transaction(async (tx) => {
+      return await tx(table).delete();
+    });
+  }
+  /**
+   * Returns a list of all providers.
+   * @returns string[]
+   */
+  async listProviders() {
+    return await this.client.transaction(async (tx) => {
+      const providers = await tx(
+        "ingestions"
+      ).distinct("provider_name");
+      return providers.map((entry) => entry.provider_name);
+    });
+  }
+  async updateByName(provider, update) {
+    await this.updateIngestionRecordByProvider(provider, update);
+  }
+}
+
+exports.IncrementalIngestionDatabaseManager = IncrementalIngestionDatabaseManager;
+//# sourceMappingURL=IncrementalIngestionDatabaseManager.cjs.js.map