@backstage/plugin-catalog-backend 1.6.0-next.3 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,72 @@
  # @backstage/plugin-catalog-backend
 
+ ## 1.6.0
+
+ ### Minor Changes
+
+ - 16891a212c: Added new `POST /entities/by-refs` endpoint, which allows you to efficiently
+ batch-fetch entities by their entity ref. This can be useful e.g. in graphql
+ resolvers or similar contexts where you need to fetch many entities at the same
+ time.
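A minimal sketch of calling the new endpoint from TypeScript. The base URL, auth, and response typing are assumptions to adapt to your deployment; the request body `{ entityRefs, fields }` and the `items` array with `null` placeholders mirror the batch lookup described above.

```ts
// Sketch only: assumes the catalog backend is mounted at /api/catalog, that the
// endpoint accepts { entityRefs, fields } in the POST body, and that missing
// entities come back as null entries in `items`. Uses the Node 18+ global fetch.
async function fetchEntitiesByRefs(entityRefs: string[]) {
  const response = await fetch(
    'http://localhost:7007/api/catalog/entities/by-refs',
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        entityRefs,
        fields: ['kind', 'metadata.name', 'metadata.namespace'],
      }),
    },
  );
  const { items } = (await response.json()) as {
    items: Array<Record<string, unknown> | null>;
  };
  // items[i] corresponds to entityRefs[i], so results can be zipped back
  // together with the refs that were requested.
  return items;
}
```

If your version of `@backstage/catalog-client` (bumped below) exposes a `getEntitiesByRefs` helper, it can be used instead of hand-rolling this request.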
+ - 273ba3a77f: Deprecated Prometheus metrics in favour of OpenTelemetry metrics.
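Judging from the compiled output further down in this diff, the new instruments are created through the global OpenTelemetry API (`metrics.getMeter('default')`), so they are only exported if a global meter provider is registered before the backend starts. One possible setup sketch, assuming the `@opentelemetry/sdk-metrics` and `@opentelemetry/exporter-prometheus` packages; the exact reader wiring varies by SDK version.

```ts
// Sketch only: registers a global meter provider that exposes the plugin's new
// OpenTelemetry metrics on a Prometheus scrape endpoint.
import { metrics } from '@opentelemetry/api';
import { MeterProvider } from '@opentelemetry/sdk-metrics';
import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';

const exporter = new PrometheusExporter({ port: 9464 }); // serves /metrics
const meterProvider = new MeterProvider();
meterProvider.addMetricReader(exporter);

// Must run before the catalog plugin calls metrics.getMeter('default').
metrics.setGlobalMeterProvider(meterProvider);
```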
+ - c395abb5b2: The catalog no longer stops after the first processor `validateEntityKind`
+ method returns `true` when validating entity kind shapes. Instead, it continues
+ through all registered processors that have this method, and requires that _at
+ least one_ of them returns `true`.
+
+ The old behavior of stopping early made it harder to extend existing core kinds
+ with additional fields, since the `BuiltinKindsEntityProcessor` is always
+ present at the top of the processing chain, which meant that your additional
+ validation code would never be run.
+
+ This is technically a breaking change, although it should not affect anybody
+ under normal circumstances, except if you had problematic validation code that
+ you were unaware was not being run. That code may now start to exhibit those
+ problems.
+
+ If you need to disable this new behavior, the `CatalogBuilder` used in your
+ `packages/backend/src/plugins/catalog.ts` file now has a
+ `useLegacySingleProcessorValidation()` method to go back to the old behavior.
+
+ ```diff
+ const builder = await CatalogBuilder.create(env);
+ +builder.useLegacySingleProcessorValidation();
+ ```
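For illustration of the behavior change above, a minimal sketch of a custom processor whose `validateEntityKind` now gets to run for a core kind. The processor name and the `spec.costCenter` field are made up; in setups of this era the `CatalogProcessor` type is imported from `@backstage/plugin-catalog-backend`.

```ts
import { Entity } from '@backstage/catalog-model';
import { CatalogProcessor } from '@backstage/plugin-catalog-backend';

// Hypothetical example: enforce an extra spec field on Component entities.
export class ExtendedComponentProcessor implements CatalogProcessor {
  getProcessorName() {
    return 'ExtendedComponentProcessor';
  }

  async validateEntityKind(entity: Entity): Promise<boolean> {
    if (entity.kind !== 'Component') {
      return false; // not handled here; other processors may still claim the kind
    }
    // With the new behavior this check runs even though
    // BuiltinKindsEntityProcessor has already returned true for the entity.
    if (typeof entity.spec?.costCenter !== 'string') {
      throw new Error('Component entities must declare spec.costCenter');
    }
    return true;
  }
}
```

Such a processor would be registered with `builder.addProcessor(new ExtendedComponentProcessor())` alongside the snippet above.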
+
+ - 3072ebfdd7: The search table now also holds the original entity value, and the facets endpoint fetches the filtered entity data from the search table.
+
+ ### Patch Changes
+
+ - ba13ff663c: Added a new `catalog.rules[].location` configuration that makes it possible to configure catalog rules to apply only to specific locations, either via exact match or a glob pattern.
+ - d8593ce0e6: Stopped using the deprecated `LocationSpec` from the `@backstage/plugin-catalog-node` package.
+ - c507aee8a2: Ensured TypeScript type checks in migration files.
+ - 2a8e3cc0b5: Optimized the `Stitcher` process to be more memory efficient.
+ - 884d749b14: Refactored to use `coreServices` from `@backstage/backend-plugin-api`.
+ - eacc8e2b55: Made it possible for entity providers to supply only entity refs, instead of full entities, in `delta` mutation deletions.
+ - b05dcd5530: Moved the `zod` dependency to a version that does not collide with other libraries.
+ - 5b3e2afa45: Replaced deprecated `substr` calls with `substring`.
+ - 71147d5c16: Internal code reorganization.
+ - 93870e4df1: Track the last time each final entity changed via a new "last updated at" timestamp in the final entities database, updated whenever the final entity is written.
+ - 20a5161f04: Added MySQL support for the catalog backend.
+ - 3280711113: Updated dependency `msw` to `^0.49.0`.
+ - e982f77fe3: Registered shutdown hook in experimental catalog plugin.
+ - b3fac9c107: Ignore attempts at emitting the current entity as a child of itself.
+ - Updated dependencies
+ - @backstage/catalog-client@1.2.0
+ - @backstage/backend-common@0.17.0
+ - @backstage/plugin-catalog-node@1.3.0
+ - @backstage/plugin-permission-common@0.7.2
+ - @backstage/plugin-permission-node@0.7.2
+ - @backstage/errors@1.1.4
+ - @backstage/backend-plugin-api@0.2.0
+ - @backstage/integration@1.4.1
+ - @backstage/types@1.0.2
+ - @backstage/plugin-search-common@1.2.0
+ - @backstage/catalog-model@1.1.4
+ - @backstage/config@1.0.5
+ - @backstage/plugin-catalog-common@1.0.9
+ - @backstage/plugin-scaffolder-common@1.2.3
+
  ## 1.6.0-next.3
 
  ### Patch Changes
@@ -1,6 +1,6 @@
  {
  "name": "@backstage/plugin-catalog-backend",
- "version": "1.6.0-next.3",
+ "version": "1.6.0",
  "main": "../dist/index.cjs.js",
  "types": "../dist/index.alpha.d.ts"
  }
package/dist/index.cjs.js CHANGED
@@ -28,6 +28,7 @@ var backendCommon = require('@backstage/backend-common');
  var luxon = require('luxon');
  var promClient = require('prom-client');
  var stableStringify = require('fast-json-stable-stringify');
+ var api = require('@opentelemetry/api');
  var express = require('express');
  var Router = require('express-promise-router');
  var yn = require('yn');
@@ -2008,54 +2009,104 @@ class DefaultCatalogProcessingEngine {
  }
  }
  function progressTracker() {
- const stitchedEntities = createCounterMetric({
+ const promStitchedEntities = createCounterMetric({
  name: "catalog_stitched_entities_count",
- help: "Amount of entities stitched"
+ help: "Amount of entities stitched. DEPRECATED, use OpenTelemetry metrics instead"
  });
- const processedEntities = createCounterMetric({
+ const promProcessedEntities = createCounterMetric({
  name: "catalog_processed_entities_count",
- help: "Amount of entities processed",
+ help: "Amount of entities processed, DEPRECATED, use OpenTelemetry metrics instead",
  labelNames: ["result"]
  });
- const processingDuration = createSummaryMetric({
+ const promProcessingDuration = createSummaryMetric({
  name: "catalog_processing_duration_seconds",
- help: "Time spent executing the full processing flow",
+ help: "Time spent executing the full processing flow, DEPRECATED, use OpenTelemetry metrics instead",
  labelNames: ["result"]
  });
- const processorsDuration = createSummaryMetric({
+ const promProcessorsDuration = createSummaryMetric({
  name: "catalog_processors_duration_seconds",
- help: "Time spent executing catalog processors",
+ help: "Time spent executing catalog processors, DEPRECATED, use OpenTelemetry metrics instead",
  labelNames: ["result"]
  });
- const processingQueueDelay = createSummaryMetric({
+ const promProcessingQueueDelay = createSummaryMetric({
  name: "catalog_processing_queue_delay_seconds",
- help: "The amount of delay between being scheduled for processing, and the start of actually being processed"
+ help: "The amount of delay between being scheduled for processing, and the start of actually being processed, DEPRECATED, use OpenTelemetry metrics instead"
  });
+ const meter = api.metrics.getMeter("default");
+ const stitchedEntities = meter.createCounter(
+ "catalog.stitched.entities.count",
+ {
+ description: "Amount of entities stitched"
+ }
+ );
+ const processedEntities = meter.createCounter(
+ "catalog.processed.entities.count",
+ { description: "Amount of entities processed" }
+ );
+ const processingDuration = meter.createHistogram(
+ "catalog.processing.duration",
+ {
+ description: "Time spent executing the full processing flow",
+ unit: "seconds"
+ }
+ );
+ const processorsDuration = meter.createHistogram(
+ "catalog.processors.duration",
+ {
+ description: "Time spent executing catalog processors",
+ unit: "seconds"
+ }
+ );
+ const processingQueueDelay = meter.createHistogram(
+ "catalog.processing.queue.delay",
+ {
+ description: "The amount of delay between being scheduled for processing, and the start of actually being processed",
+ unit: "seconds"
+ }
+ );
  function processStart(item, logger) {
+ const startTime = process.hrtime();
+ const endOverallTimer = promProcessingDuration.startTimer();
+ const endProcessorsTimer = promProcessorsDuration.startTimer();
  logger.debug(`Processing ${item.entityRef}`);
  if (item.nextUpdateAt) {
- processingQueueDelay.observe(-item.nextUpdateAt.diffNow().as("seconds"));
+ const seconds = -item.nextUpdateAt.diffNow().as("seconds");
+ promProcessingQueueDelay.observe(seconds);
+ processingQueueDelay.record(seconds);
+ }
+ function endTime() {
+ const delta = process.hrtime(startTime);
+ return delta[0] + delta[1] / 1e9;
  }
- const endOverallTimer = processingDuration.startTimer();
- const endProcessorsTimer = processorsDuration.startTimer();
  function markProcessorsCompleted(result) {
  endProcessorsTimer({ result: result.ok ? "ok" : "failed" });
+ processorsDuration.record(endTime(), {
+ result: result.ok ? "ok" : "failed"
+ });
  }
  function markSuccessfulWithNoChanges() {
  endOverallTimer({ result: "unchanged" });
- processedEntities.inc({ result: "unchanged" }, 1);
+ promProcessedEntities.inc({ result: "unchanged" }, 1);
+ processingDuration.record(endTime(), { result: "unchanged" });
+ processedEntities.add(1, { result: "unchanged" });
  }
  function markSuccessfulWithErrors() {
  endOverallTimer({ result: "errors" });
- processedEntities.inc({ result: "errors" }, 1);
+ promProcessedEntities.inc({ result: "errors" }, 1);
+ processingDuration.record(endTime(), { result: "errors" });
+ processedEntities.add(1, { result: "errors" });
  }
  function markSuccessfulWithChanges(stitchedCount) {
  endOverallTimer({ result: "changed" });
- stitchedEntities.inc(stitchedCount);
- processedEntities.inc({ result: "changed" }, 1);
+ promStitchedEntities.inc(stitchedCount);
+ promProcessedEntities.inc({ result: "changed" }, 1);
+ processingDuration.record(endTime(), { result: "changed" });
+ stitchedEntities.add(stitchedCount);
+ processedEntities.add(1, { result: "changed" });
  }
  function markFailed(error) {
- processedEntities.inc({ result: "failed" }, 1);
+ promProcessedEntities.inc({ result: "failed" }, 1);
+ processedEntities.add(1, { result: "failed" });
  logger.warn(`Processing of ${item.entityRef} failed`, error);
  }
  return {
@@ -2439,8 +2490,14 @@ class ProcessorOutputCollector {
  this.refreshKeys = new Array();
  this.done = false;
  }
- get onEmit() {
- return (i) => this.receive(i);
+ generic() {
+ return (i) => this.receive(this.logger, i);
+ }
+ forProcessor(processor) {
+ const logger = this.logger.child({
+ processor: processor.getProcessorName()
+ });
+ return (i) => this.receive(logger, i);
  }
  results() {
  this.done = true;
@@ -2451,9 +2508,9 @@ class ProcessorOutputCollector {
  deferredEntities: this.deferredEntities
  };
  }
- receive(i) {
+ receive(logger, i) {
  if (this.done) {
- this.logger.warn(
+ logger.warn(
  `Item of type "${i.type}" was emitted after processing had completed. Stack trace: ${new Error().stack}`
  );
  return;
@@ -2465,10 +2522,17 @@ class ProcessorOutputCollector {
  entity = validateEntityEnvelope(i.entity);
  } catch (e) {
  errors.assertError(e);
- this.logger.debug(`Envelope validation failed at ${location}, ${e}`);
+ logger.debug(`Envelope validation failed at ${location}, ${e}`);
  this.errors.push(e);
  return;
  }
+ const entityRef = catalogModel.stringifyEntityRef(entity);
+ if (entityRef === catalogModel.stringifyEntityRef(this.parentEntity)) {
+ logger.warn(
+ `Ignored emitted entity ${entityRef} whose ref was identical to the one being processed. This commonly indicates mistakenly emitting the input entity instead of returning it.`
+ );
+ return;
+ }
  const annotations = entity.metadata.annotations || {};
  if (typeof annotations === "object" && !Array.isArray(annotations)) {
  const originLocation = getEntityOriginLocationRef(this.parentEntity);
@@ -2666,7 +2730,7 @@ class DefaultCatalogProcessingOrchestrator {
  res = await processor.preProcessEntity(
  res,
  context.location,
- context.collector.onEmit,
+ context.collector.forProcessor(processor),
  context.originLocation,
  context.cache.forProcessor(processor)
  );
@@ -2749,7 +2813,7 @@ class DefaultCatalogProcessingOrchestrator {
  }
  for (const maybeRelativeTarget of targets) {
  if (type === "file" && maybeRelativeTarget.endsWith(path__default["default"].sep)) {
- context.collector.onEmit(
+ context.collector.generic()(
  pluginCatalogNode.processingResult.inputError(
  context.location,
  `LocationEntityProcessor cannot handle ${type} type location with target ${context.location.target} that ends with a path separator`
@@ -2774,7 +2838,7 @@ class DefaultCatalogProcessingOrchestrator {
  presence
  },
  presence === "optional",
- context.collector.onEmit,
+ context.collector.forProcessor(processor),
  this.options.parser,
  context.cache.forProcessor(processor, target)
  );
@@ -2805,7 +2869,7 @@ class DefaultCatalogProcessingOrchestrator {
  res = await processor.postProcessEntity(
  res,
  context.location,
- context.collector.onEmit,
+ context.collector.forProcessor(processor),
  context.cache.forProcessor(processor)
  );
  } catch (e) {
@@ -3041,8 +3105,9 @@ class Stitcher {
  "final_entities"
  ).update({
  final_entity: JSON.stringify(entity),
- hash
- }).where("entity_id", entityId).where("stitch_ticket", ticket).onConflict("entity_id").merge(["final_entity", "hash"]);
+ hash,
+ last_updated_at: this.database.fn.now()
+ }).where("entity_id", entityId).where("stitch_ticket", ticket).onConflict("entity_id").merge(["final_entity", "hash", "last_updated_at"]);
  if (amountOfRowsChanged === 0) {
  this.logger.debug(
  `Entity ${entityRef} is already processed, skipping write.`