@backstage/plugin-catalog-backend 3.5.1-next.0 → 3.5.1-next.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
1
1
  # @backstage/plugin-catalog-backend
2
2
 
3
+ ## 3.5.1-next.1
4
+
5
+ ### Patch Changes
6
+
7
+ 2e5c5f8: Bumped `glob` dependency from v7/v8/v11 to v13 to address security vulnerabilities in older versions. Bumped `rollup` from v4.27 to v4.59+ to fix a high-severity path traversal vulnerability (GHSA-mw96-cpmx-2vgc).
8
+ - 6884814: Improved catalog entity filter query performance by switching from `IN (subquery)` to `EXISTS (correlated subquery)` patterns. This enables PostgreSQL semi-join optimizations and fixes `NOT IN` NULL-semantics pitfalls by using `NOT EXISTS` instead.
9
+ - 9da73bf: Reduced search table write churn during stitching by syncing only changed rows instead of doing a full delete and re-insert. On Postgres this uses a single writable CTE, on MySQL a temporary table merge with deadlock retry, and on SQLite the previous bulk replace.
10
+ - Updated dependencies
11
+ - @backstage/backend-plugin-api@1.9.0-next.1
12
+ - @backstage/backend-openapi-utils@0.6.8-next.1
13
+ - @backstage/plugin-catalog-node@2.1.1-next.1
14
+ - @backstage/plugin-events-node@0.4.21-next.1
15
+ - @backstage/plugin-permission-node@0.10.12-next.1
16
+
3
17
  ## 3.5.1-next.0
4
18
 
5
19
  ### Patch Changes
@@ -4,6 +4,7 @@ var catalogClient = require('@backstage/catalog-client');
4
4
  var catalogModel = require('@backstage/catalog-model');
5
5
  var buildEntitySearch = require('./buildEntitySearch.cjs.js');
6
6
  var markDeferredStitchCompleted = require('./markDeferredStitchCompleted.cjs.js');
7
+ var syncSearchRows = require('./syncSearchRows.cjs.js');
7
8
  var util = require('./util.cjs.js');
8
9
  var backendPluginApi = require('@backstage/backend-plugin-api');
9
10
 
@@ -134,10 +135,7 @@ async function performStitching(options) {
134
135
  logger.debug(`Entity ${entityRef} is already stitched, skipping write.`);
135
136
  return "abandoned";
136
137
  }
137
- await knex.transaction(async (trx) => {
138
- await trx("search").where({ entity_id: entityId }).delete();
139
- await trx.batchInsert("search", searchEntries, util.BATCH_SIZE);
140
- });
138
+ await syncSearchRows.syncSearchRows(knex, entityId, searchEntries);
141
139
  return "changed";
142
140
  } catch (error) {
143
141
  removeFromStitchQueueOnCompletion = false;
@@ -1 +1 @@
1
- {"version":3,"file":"performStitching.cjs.js","sources":["../../../../src/database/operations/stitcher/performStitching.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ENTITY_STATUS_CATALOG_PROCESSING_TYPE } from '@backstage/catalog-client';\nimport {\n ANNOTATION_EDIT_URL,\n ANNOTATION_VIEW_URL,\n EntityRelation,\n} from '@backstage/catalog-model';\nimport { AlphaEntity, EntityStatusItem } from '@backstage/catalog-model/alpha';\nimport { SerializedError } from '@backstage/errors';\nimport { Knex } from 'knex';\nimport { StitchingStrategy } from '../../../stitching/types';\nimport {\n DbFinalEntitiesRow,\n DbRefreshStateRow,\n DbSearchRow,\n DbStitchQueueRow,\n} from '../../tables';\nimport { buildEntitySearch } from './buildEntitySearch';\nimport { markDeferredStitchCompleted } from './markDeferredStitchCompleted';\nimport { BATCH_SIZE, generateStableHash } from './util';\nimport {\n LoggerService,\n isDatabaseConflictError,\n} from '@backstage/backend-plugin-api';\n\n// See https://github.com/facebook/react/blob/f0cf832e1d0c8544c36aa8b310960885a11a847c/packages/react-dom-bindings/src/shared/sanitizeURL.js\nconst scriptProtocolPattern =\n // eslint-disable-next-line no-control-regex\n /^[\\u0000-\\u001F ]*j[\\r\\n\\t]*a[\\r\\n\\t]*v[\\r\\n\\t]*a[\\r\\n\\t]*s[\\r\\n\\t]*c[\\r\\n\\t]*r[\\r\\n\\t]*i[\\r\\n\\t]*p[\\r\\n\\t]*t[\\r\\n\\t]*\\:/i;\n\n/**\n * Performs the act 
of stitching - to take all of the various outputs from the\n * ingestion process, and stitching them together into the final entity JSON\n * shape.\n */\nexport async function performStitching(options: {\n knex: Knex | Knex.Transaction;\n logger: LoggerService;\n strategy: StitchingStrategy;\n entityRef: string;\n stitchTicket?: string;\n}): Promise<'changed' | 'unchanged' | 'abandoned'> {\n const { knex, logger, entityRef } = options;\n const stitchTicket = options.stitchTicket;\n\n // In deferred mode, the entity is removed from the stitch queue on ANY\n // completion, except when an exception is thrown. In the latter case, the\n // entity will be retried at a later time.\n let removeFromStitchQueueOnCompletion = options.strategy.mode === 'deferred';\n\n try {\n const entityResult = await knex<DbRefreshStateRow>('refresh_state')\n .where({ entity_ref: entityRef })\n .limit(1)\n .select('entity_id');\n if (!entityResult.length) {\n // Entity does no exist in refresh state table, no stitching required.\n return 'abandoned';\n }\n\n // Ensure that a final_entities row exists for this entity.\n try {\n await knex<DbFinalEntitiesRow>('final_entities')\n .insert({\n entity_id: entityResult[0].entity_id,\n hash: '',\n entity_ref: entityRef,\n })\n .onConflict('entity_id')\n .ignore();\n } catch (error) {\n // It's possible to hit a race where a refresh_state table delete + insert\n // is done just after we read the entity_id from it. 
This conflict is safe\n // to ignore because the current stitching operation will be triggered by\n // the old entry, and the new entry will trigger it's own stitching that\n // will update the entity.\n if (isDatabaseConflictError(error)) {\n logger.debug(`Skipping stitching of ${entityRef}, conflict`, error);\n return 'abandoned';\n }\n\n throw error;\n }\n\n // Selecting from refresh_state and final_entities should yield exactly\n // one row (except in abnormal cases where the stitch was invoked for\n // something that didn't exist at all, in which case it's zero rows).\n // The join with the temporary incoming_references still gives one row.\n const [processedResult, relationsResult] = await Promise.all([\n knex\n .with('incoming_references', function incomingReferences(builder) {\n return builder\n .from('refresh_state_references')\n .where({ target_entity_ref: entityRef })\n .count({ count: '*' });\n })\n .select({\n entityId: 'refresh_state.entity_id',\n processedEntity: 'refresh_state.processed_entity',\n errors: 'refresh_state.errors',\n incomingReferenceCount: 'incoming_references.count',\n previousHash: 'final_entities.hash',\n })\n .from('refresh_state')\n .where({ 'refresh_state.entity_ref': entityRef })\n .crossJoin(knex.raw('incoming_references'))\n .leftOuterJoin('final_entities', {\n 'final_entities.entity_id': 'refresh_state.entity_id',\n }),\n knex\n .distinct({\n relationType: 'type',\n relationTarget: 'target_entity_ref',\n })\n .from('relations')\n .where({ source_entity_ref: entityRef })\n .orderBy('relationType', 'asc')\n .orderBy('relationTarget', 'asc'),\n ]);\n\n // If there were no rows returned, it would mean that there was no\n // matching row even in the refresh_state. 
This can happen for example\n // if we emit a relation to something that hasn't been ingested yet.\n // It's safe to ignore this stitch attempt in that case.\n if (!processedResult.length) {\n logger.debug(\n `Unable to stitch ${entityRef}, item does not exist in refresh state table`,\n );\n return 'abandoned';\n }\n\n const {\n entityId,\n processedEntity,\n errors,\n incomingReferenceCount,\n previousHash,\n } = processedResult[0];\n\n // If there was no processed entity in place, the target hasn't been\n // through the processing steps yet. It's safe to ignore this stitch\n // attempt in that case, since another stitch will be triggered when\n // that processing has finished.\n if (!processedEntity) {\n logger.debug(\n `Unable to stitch ${entityRef}, the entity has not yet been processed`,\n );\n return 'abandoned';\n }\n\n // Grab the processed entity and stitch all of the relevant data into\n // it\n const entity = JSON.parse(processedEntity) as AlphaEntity;\n const isOrphan = Number(incomingReferenceCount) === 0;\n let statusItems: EntityStatusItem[] = [];\n\n if (isOrphan) {\n logger.debug(`${entityRef} is an orphan`);\n entity.metadata.annotations = {\n ...entity.metadata.annotations,\n ['backstage.io/orphan']: 'true',\n };\n }\n if (errors) {\n const parsedErrors = JSON.parse(errors) as SerializedError[];\n if (Array.isArray(parsedErrors) && parsedErrors.length) {\n statusItems = parsedErrors.map(e => ({\n type: ENTITY_STATUS_CATALOG_PROCESSING_TYPE,\n level: 'error',\n message: `${e.name}: ${e.message}`,\n error: e,\n }));\n }\n }\n // We opt to do this check here as we otherwise can't guarantee that it will be run after all processors\n for (const annotation of [ANNOTATION_VIEW_URL, ANNOTATION_EDIT_URL]) {\n const value = entity.metadata.annotations?.[annotation];\n if (typeof value === 'string' && scriptProtocolPattern.test(value)) {\n entity.metadata.annotations![annotation] =\n 'https://backstage.io/annotation-rejected-for-security-reasons';\n }\n 
}\n\n // TODO: entityRef is lower case and should be uppercase in the final\n // result\n entity.relations = relationsResult\n .filter(row => row.relationType /* exclude null row, if relevant */)\n .map<EntityRelation>(row => ({\n type: row.relationType!,\n targetRef: row.relationTarget!,\n }));\n if (statusItems.length) {\n entity.status = {\n ...entity.status,\n items: [...(entity.status?.items ?? []), ...statusItems],\n };\n }\n\n // If the output entity was actually not changed, just abort\n const hash = generateStableHash(entity);\n if (hash === previousHash) {\n logger.debug(`Skipped stitching of ${entityRef}, no changes`);\n return 'unchanged';\n }\n\n entity.metadata.uid = entityId;\n if (!entity.metadata.etag) {\n // If the original data source did not have its own etag handling,\n // use the hash as a good-quality etag\n entity.metadata.etag = hash;\n }\n\n // This may throw if the entity is invalid, so we call it before\n // the final_entities write, even though we may end up not needing\n // to write the search index.\n const searchEntries = buildEntitySearch(entityId, entity);\n\n let updateQuery = knex<DbFinalEntitiesRow>('final_entities')\n .update({\n final_entity: JSON.stringify(entity),\n hash,\n last_updated_at: knex.fn.now(),\n })\n .where('entity_id', entityId);\n\n // In deferred mode, guard against concurrent stitchers by checking that\n // the stitch_ticket in stitch_queue still matches what we were given.\n if (options.strategy.mode === 'deferred' && stitchTicket) {\n updateQuery = updateQuery.whereExists(\n knex<DbStitchQueueRow>('stitch_queue')\n .where('stitch_queue.entity_ref', entityRef)\n .where('stitch_queue.stitch_ticket', stitchTicket)\n .select(knex.raw('1')),\n );\n }\n\n const amountOfRowsChanged = await updateQuery;\n\n if (amountOfRowsChanged === 0) {\n logger.debug(`Entity ${entityRef} is already stitched, skipping write.`);\n return 'abandoned';\n }\n\n await knex.transaction(async trx => {\n await 
trx<DbSearchRow>('search').where({ entity_id: entityId }).delete();\n await trx.batchInsert('search', searchEntries, BATCH_SIZE);\n });\n\n return 'changed';\n } catch (error) {\n removeFromStitchQueueOnCompletion = false;\n throw error;\n } finally {\n if (removeFromStitchQueueOnCompletion && stitchTicket) {\n await markDeferredStitchCompleted({\n knex: knex,\n entityRef,\n stitchTicket,\n });\n }\n }\n}\n"],"names":["isDatabaseConflictError","ENTITY_STATUS_CATALOG_PROCESSING_TYPE","ANNOTATION_VIEW_URL","ANNOTATION_EDIT_URL","generateStableHash","buildEntitySearch","BATCH_SIZE","markDeferredStitchCompleted"],"mappings":";;;;;;;;;AAyCA,MAAM,qBAAA;AAAA;AAAA,EAEJ;AAAA,CAAA;AAOF,eAAsB,iBAAiB,OAAA,EAMY;AACjD,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,SAAA,EAAU,GAAI,OAAA;AACpC,EAAA,MAAM,eAAe,OAAA,CAAQ,YAAA;AAK7B,EAAA,IAAI,iCAAA,GAAoC,OAAA,CAAQ,QAAA,CAAS,IAAA,KAAS,UAAA;AAElE,EAAA,IAAI;AACF,IAAA,MAAM,YAAA,GAAe,MAAM,IAAA,CAAwB,eAAe,EAC/D,KAAA,CAAM,EAAE,UAAA,EAAY,SAAA,EAAW,CAAA,CAC/B,KAAA,CAAM,CAAC,CAAA,CACP,OAAO,WAAW,CAAA;AACrB,IAAA,IAAI,CAAC,aAAa,MAAA,EAAQ;AAExB,MAAA,OAAO,WAAA;AAAA,IACT;AAGA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAyB,gBAAgB,CAAA,CAC5C,MAAA,CAAO;AAAA,QACN,SAAA,EAAW,YAAA,CAAa,CAAC,CAAA,CAAE,SAAA;AAAA,QAC3B,IAAA,EAAM,EAAA;AAAA,QACN,UAAA,EAAY;AAAA,OACb,CAAA,CACA,UAAA,CAAW,WAAW,EACtB,MAAA,EAAO;AAAA,IACZ,SAAS,KAAA,EAAO;AAMd,MAAA,IAAIA,wCAAA,CAAwB,KAAK,CAAA,EAAG;AAClC,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,sBAAA,EAAyB,SAAS,CAAA,UAAA,CAAA,EAAc,KAAK,CAAA;AAClE,QAAA,OAAO,WAAA;AAAA,MACT;AAEA,MAAA,MAAM,KAAA;AAAA,IACR;AAMA,IAAA,MAAM,CAAC,eAAA,EAAiB,eAAe,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,MAC3D,IAAA,CACG,IAAA,CAAK,qBAAA,EAAuB,SAAS,mBAAmB,OAAA,EAAS;AAChE,QAAA,OAAO,OAAA,CACJ,IAAA,CAAK,0BAA0B,CAAA,CAC/B,MAAM,EAAE,iBAAA,EAAmB,SAAA,EAAW,CAAA,CACtC,KAAA,CAAM,EAAE,KAAA,EAAO,KAAK,CAAA;AAAA,MACzB,CAAC,EACA,MAAA,CAAO;AAAA,QACN,QAAA,EAAU,yBAAA;AAAA,QACV,eAAA,EAAiB,gCAAA;AAAA,QACjB,MAAA,EAAQ,sBAAA;AAAA,QACR,sBAAA,EAAwB,2BAAA;AAAA,QACxB,YAAA,EAAc;AAAA,OACf,CAAA,CACA,IAAA,CAAK,eAAe,CAAA,CACpB,KAAA,CAAM,EAAE
,0BAAA,EAA4B,SAAA,EAAW,CAAA,CAC/C,UAAU,IAAA,CAAK,GAAA,CAAI,qBAAqB,CAAC,CAAA,CACzC,cAAc,gBAAA,EAAkB;AAAA,QAC/B,0BAAA,EAA4B;AAAA,OAC7B,CAAA;AAAA,MACH,KACG,QAAA,CAAS;AAAA,QACR,YAAA,EAAc,MAAA;AAAA,QACd,cAAA,EAAgB;AAAA,OACjB,CAAA,CACA,IAAA,CAAK,WAAW,CAAA,CAChB,MAAM,EAAE,iBAAA,EAAmB,SAAA,EAAW,EACtC,OAAA,CAAQ,cAAA,EAAgB,KAAK,CAAA,CAC7B,OAAA,CAAQ,kBAAkB,KAAK;AAAA,KACnC,CAAA;AAMD,IAAA,IAAI,CAAC,gBAAgB,MAAA,EAAQ;AAC3B,MAAA,MAAA,CAAO,KAAA;AAAA,QACL,oBAAoB,SAAS,CAAA,4CAAA;AAAA,OAC/B;AACA,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM;AAAA,MACJ,QAAA;AAAA,MACA,eAAA;AAAA,MACA,MAAA;AAAA,MACA,sBAAA;AAAA,MACA;AAAA,KACF,GAAI,gBAAgB,CAAC,CAAA;AAMrB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,MAAA,CAAO,KAAA;AAAA,QACL,oBAAoB,SAAS,CAAA,uCAAA;AAAA,OAC/B;AACA,MAAA,OAAO,WAAA;AAAA,IACT;AAIA,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,eAAe,CAAA;AACzC,IAAA,MAAM,QAAA,GAAW,MAAA,CAAO,sBAAsB,CAAA,KAAM,CAAA;AACpD,IAAA,IAAI,cAAkC,EAAC;AAEvC,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,SAAS,CAAA,aAAA,CAAe,CAAA;AACxC,MAAA,MAAA,CAAO,SAAS,WAAA,GAAc;AAAA,QAC5B,GAAG,OAAO,QAAA,CAAS,WAAA;AAAA,QACnB,CAAC,qBAAqB,GAAG;AAAA,OAC3B;AAAA,IACF;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,CAAM,MAAM,CAAA;AACtC,MAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,YAAY,CAAA,IAAK,aAAa,MAAA,EAAQ;AACtD,QAAA,WAAA,GAAc,YAAA,CAAa,IAAI,CAAA,CAAA,MAAM;AAAA,UACnC,IAAA,EAAMC,mDAAA;AAAA,UACN,KAAA,EAAO,OAAA;AAAA,UACP,SAAS,CAAA,EAAG,CAAA,CAAE,IAAI,CAAA,EAAA,EAAK,EAAE,OAAO,CAAA,CAAA;AAAA,UAChC,KAAA,EAAO;AAAA,SACT,CAAE,CAAA;AAAA,MACJ;AAAA,IACF;AAEA,IAAA,KAAA,MAAW,UAAA,IAAc,CAACC,gCAAA,EAAqBC,gCAAmB,CAAA,EAAG;AACnE,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,QAAA,CAAS,WAAA,GAAc,UAAU,CAAA;AACtD,MAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,qBAAA,CAAsB,IAAA,CAAK,KAAK,CAAA,EAAG;AAClE,QAAA,MAAA,CAAO,QAAA,CAAS,WAAA,CAAa,UAAU,CAAA,GACrC,+DAAA;AAAA,MACJ;AAAA,IACF;AAIA,IAAA,MAAA,CAAO,YAAY,eAAA,CAChB,MAAA;AAAA,MAAO,SAAO,GAAA,CAAI;AAAA;AAAA,KAAgD,CAClE,IAAoB,CAAA,GAAA,MAAQ;AAAA,MAC3B,MAAM,GAAA,CAAI,YAAA;AAAA,MACV,WAAW,GAAA,CAAI;AAAA,KACjB,CAAE,CAAA;AACJ,IAAA,IAAI,YAAY,MAAA,EA
AQ;AACtB,MAAA,MAAA,CAAO,MAAA,GAAS;AAAA,QACd,GAAG,MAAA,CAAO,MAAA;AAAA,QACV,KAAA,EAAO,CAAC,GAAI,MAAA,CAAO,QAAQ,KAAA,IAAS,EAAC,EAAI,GAAG,WAAW;AAAA,OACzD;AAAA,IACF;AAGA,IAAA,MAAM,IAAA,GAAOC,wBAAmB,MAAM,CAAA;AACtC,IAAA,IAAI,SAAS,YAAA,EAAc;AACzB,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,qBAAA,EAAwB,SAAS,CAAA,YAAA,CAAc,CAAA;AAC5D,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAA,CAAO,SAAS,GAAA,GAAM,QAAA;AACtB,IAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,IAAA,EAAM;AAGzB,MAAA,MAAA,CAAO,SAAS,IAAA,GAAO,IAAA;AAAA,IACzB;AAKA,IAAA,MAAM,aAAA,GAAgBC,mCAAA,CAAkB,QAAA,EAAU,MAAM,CAAA;AAExD,IAAA,IAAI,WAAA,GAAc,IAAA,CAAyB,gBAAgB,CAAA,CACxD,MAAA,CAAO;AAAA,MACN,YAAA,EAAc,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AAAA,MACnC,IAAA;AAAA,MACA,eAAA,EAAiB,IAAA,CAAK,EAAA,CAAG,GAAA;AAAI,KAC9B,CAAA,CACA,KAAA,CAAM,WAAA,EAAa,QAAQ,CAAA;AAI9B,IAAA,IAAI,OAAA,CAAQ,QAAA,CAAS,IAAA,KAAS,UAAA,IAAc,YAAA,EAAc;AACxD,MAAA,WAAA,GAAc,WAAA,CAAY,WAAA;AAAA,QACxB,IAAA,CAAuB,cAAc,CAAA,CAClC,KAAA,CAAM,2BAA2B,SAAS,CAAA,CAC1C,KAAA,CAAM,4BAAA,EAA8B,YAAY,CAAA,CAChD,MAAA,CAAO,IAAA,CAAK,GAAA,CAAI,GAAG,CAAC;AAAA,OACzB;AAAA,IACF;AAEA,IAAA,MAAM,sBAAsB,MAAM,WAAA;AAElC,IAAA,IAAI,wBAAwB,CAAA,EAAG;AAC7B,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,OAAA,EAAU,SAAS,CAAA,qCAAA,CAAuC,CAAA;AACvE,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM,IAAA,CAAK,WAAA,CAAY,OAAM,GAAA,KAAO;AAClC,MAAA,MAAM,GAAA,CAAiB,QAAQ,CAAA,CAAE,KAAA,CAAM,EAAE,SAAA,EAAW,QAAA,EAAU,CAAA,CAAE,MAAA,EAAO;AACvE,MAAA,MAAM,GAAA,CAAI,WAAA,CAAY,QAAA,EAAU,aAAA,EAAeC,eAAU,CAAA;AAAA,IAC3D,CAAC,CAAA;AAED,IAAA,OAAO,SAAA;AAAA,EACT,SAAS,KAAA,EAAO;AACd,IAAA,iCAAA,GAAoC,KAAA;AACpC,IAAA,MAAM,KAAA;AAAA,EACR,CAAA,SAAE;AACA,IAAA,IAAI,qCAAqC,YAAA,EAAc;AACrD,MAAA,MAAMC,uDAAA,CAA4B;AAAA,QAChC,IAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;;;;"}
1
+ {"version":3,"file":"performStitching.cjs.js","sources":["../../../../src/database/operations/stitcher/performStitching.ts"],"sourcesContent":["/*\n * Copyright 2023 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ENTITY_STATUS_CATALOG_PROCESSING_TYPE } from '@backstage/catalog-client';\nimport {\n ANNOTATION_EDIT_URL,\n ANNOTATION_VIEW_URL,\n EntityRelation,\n} from '@backstage/catalog-model';\nimport { AlphaEntity, EntityStatusItem } from '@backstage/catalog-model/alpha';\nimport { SerializedError } from '@backstage/errors';\nimport { Knex } from 'knex';\nimport { StitchingStrategy } from '../../../stitching/types';\nimport {\n DbFinalEntitiesRow,\n DbRefreshStateRow,\n DbStitchQueueRow,\n} from '../../tables';\nimport { buildEntitySearch } from './buildEntitySearch';\nimport { markDeferredStitchCompleted } from './markDeferredStitchCompleted';\nimport { syncSearchRows } from './syncSearchRows';\nimport { generateStableHash } from './util';\nimport {\n LoggerService,\n isDatabaseConflictError,\n} from '@backstage/backend-plugin-api';\n\n// See https://github.com/facebook/react/blob/f0cf832e1d0c8544c36aa8b310960885a11a847c/packages/react-dom-bindings/src/shared/sanitizeURL.js\nconst scriptProtocolPattern =\n // eslint-disable-next-line no-control-regex\n /^[\\u0000-\\u001F 
]*j[\\r\\n\\t]*a[\\r\\n\\t]*v[\\r\\n\\t]*a[\\r\\n\\t]*s[\\r\\n\\t]*c[\\r\\n\\t]*r[\\r\\n\\t]*i[\\r\\n\\t]*p[\\r\\n\\t]*t[\\r\\n\\t]*\\:/i;\n\n/**\n * Performs the act of stitching - to take all of the various outputs from the\n * ingestion process, and stitching them together into the final entity JSON\n * shape.\n */\nexport async function performStitching(options: {\n knex: Knex | Knex.Transaction;\n logger: LoggerService;\n strategy: StitchingStrategy;\n entityRef: string;\n stitchTicket?: string;\n}): Promise<'changed' | 'unchanged' | 'abandoned'> {\n const { knex, logger, entityRef } = options;\n const stitchTicket = options.stitchTicket;\n\n // In deferred mode, the entity is removed from the stitch queue on ANY\n // completion, except when an exception is thrown. In the latter case, the\n // entity will be retried at a later time.\n let removeFromStitchQueueOnCompletion = options.strategy.mode === 'deferred';\n\n try {\n const entityResult = await knex<DbRefreshStateRow>('refresh_state')\n .where({ entity_ref: entityRef })\n .limit(1)\n .select('entity_id');\n if (!entityResult.length) {\n // Entity does no exist in refresh state table, no stitching required.\n return 'abandoned';\n }\n\n // Ensure that a final_entities row exists for this entity.\n try {\n await knex<DbFinalEntitiesRow>('final_entities')\n .insert({\n entity_id: entityResult[0].entity_id,\n hash: '',\n entity_ref: entityRef,\n })\n .onConflict('entity_id')\n .ignore();\n } catch (error) {\n // It's possible to hit a race where a refresh_state table delete + insert\n // is done just after we read the entity_id from it. 
This conflict is safe\n // to ignore because the current stitching operation will be triggered by\n // the old entry, and the new entry will trigger it's own stitching that\n // will update the entity.\n if (isDatabaseConflictError(error)) {\n logger.debug(`Skipping stitching of ${entityRef}, conflict`, error);\n return 'abandoned';\n }\n\n throw error;\n }\n\n // Selecting from refresh_state and final_entities should yield exactly\n // one row (except in abnormal cases where the stitch was invoked for\n // something that didn't exist at all, in which case it's zero rows).\n // The join with the temporary incoming_references still gives one row.\n const [processedResult, relationsResult] = await Promise.all([\n knex\n .with('incoming_references', function incomingReferences(builder) {\n return builder\n .from('refresh_state_references')\n .where({ target_entity_ref: entityRef })\n .count({ count: '*' });\n })\n .select({\n entityId: 'refresh_state.entity_id',\n processedEntity: 'refresh_state.processed_entity',\n errors: 'refresh_state.errors',\n incomingReferenceCount: 'incoming_references.count',\n previousHash: 'final_entities.hash',\n })\n .from('refresh_state')\n .where({ 'refresh_state.entity_ref': entityRef })\n .crossJoin(knex.raw('incoming_references'))\n .leftOuterJoin('final_entities', {\n 'final_entities.entity_id': 'refresh_state.entity_id',\n }),\n knex\n .distinct({\n relationType: 'type',\n relationTarget: 'target_entity_ref',\n })\n .from('relations')\n .where({ source_entity_ref: entityRef })\n .orderBy('relationType', 'asc')\n .orderBy('relationTarget', 'asc'),\n ]);\n\n // If there were no rows returned, it would mean that there was no\n // matching row even in the refresh_state. 
This can happen for example\n // if we emit a relation to something that hasn't been ingested yet.\n // It's safe to ignore this stitch attempt in that case.\n if (!processedResult.length) {\n logger.debug(\n `Unable to stitch ${entityRef}, item does not exist in refresh state table`,\n );\n return 'abandoned';\n }\n\n const {\n entityId,\n processedEntity,\n errors,\n incomingReferenceCount,\n previousHash,\n } = processedResult[0];\n\n // If there was no processed entity in place, the target hasn't been\n // through the processing steps yet. It's safe to ignore this stitch\n // attempt in that case, since another stitch will be triggered when\n // that processing has finished.\n if (!processedEntity) {\n logger.debug(\n `Unable to stitch ${entityRef}, the entity has not yet been processed`,\n );\n return 'abandoned';\n }\n\n // Grab the processed entity and stitch all of the relevant data into\n // it\n const entity = JSON.parse(processedEntity) as AlphaEntity;\n const isOrphan = Number(incomingReferenceCount) === 0;\n let statusItems: EntityStatusItem[] = [];\n\n if (isOrphan) {\n logger.debug(`${entityRef} is an orphan`);\n entity.metadata.annotations = {\n ...entity.metadata.annotations,\n ['backstage.io/orphan']: 'true',\n };\n }\n if (errors) {\n const parsedErrors = JSON.parse(errors) as SerializedError[];\n if (Array.isArray(parsedErrors) && parsedErrors.length) {\n statusItems = parsedErrors.map(e => ({\n type: ENTITY_STATUS_CATALOG_PROCESSING_TYPE,\n level: 'error',\n message: `${e.name}: ${e.message}`,\n error: e,\n }));\n }\n }\n // We opt to do this check here as we otherwise can't guarantee that it will be run after all processors\n for (const annotation of [ANNOTATION_VIEW_URL, ANNOTATION_EDIT_URL]) {\n const value = entity.metadata.annotations?.[annotation];\n if (typeof value === 'string' && scriptProtocolPattern.test(value)) {\n entity.metadata.annotations![annotation] =\n 'https://backstage.io/annotation-rejected-for-security-reasons';\n }\n 
}\n\n // TODO: entityRef is lower case and should be uppercase in the final\n // result\n entity.relations = relationsResult\n .filter(row => row.relationType /* exclude null row, if relevant */)\n .map<EntityRelation>(row => ({\n type: row.relationType!,\n targetRef: row.relationTarget!,\n }));\n if (statusItems.length) {\n entity.status = {\n ...entity.status,\n items: [...(entity.status?.items ?? []), ...statusItems],\n };\n }\n\n // If the output entity was actually not changed, just abort\n const hash = generateStableHash(entity);\n if (hash === previousHash) {\n logger.debug(`Skipped stitching of ${entityRef}, no changes`);\n return 'unchanged';\n }\n\n entity.metadata.uid = entityId;\n if (!entity.metadata.etag) {\n // If the original data source did not have its own etag handling,\n // use the hash as a good-quality etag\n entity.metadata.etag = hash;\n }\n\n // This may throw if the entity is invalid, so we call it before\n // the final_entities write, even though we may end up not needing\n // to write the search index.\n const searchEntries = buildEntitySearch(entityId, entity);\n\n let updateQuery = knex<DbFinalEntitiesRow>('final_entities')\n .update({\n final_entity: JSON.stringify(entity),\n hash,\n last_updated_at: knex.fn.now(),\n })\n .where('entity_id', entityId);\n\n // In deferred mode, guard against concurrent stitchers by checking that\n // the stitch_ticket in stitch_queue still matches what we were given.\n if (options.strategy.mode === 'deferred' && stitchTicket) {\n updateQuery = updateQuery.whereExists(\n knex<DbStitchQueueRow>('stitch_queue')\n .where('stitch_queue.entity_ref', entityRef)\n .where('stitch_queue.stitch_ticket', stitchTicket)\n .select(knex.raw('1')),\n );\n }\n\n const amountOfRowsChanged = await updateQuery;\n\n if (amountOfRowsChanged === 0) {\n logger.debug(`Entity ${entityRef} is already stitched, skipping write.`);\n return 'abandoned';\n }\n\n await syncSearchRows(knex, entityId, searchEntries);\n\n return 
'changed';\n } catch (error) {\n removeFromStitchQueueOnCompletion = false;\n throw error;\n } finally {\n if (removeFromStitchQueueOnCompletion && stitchTicket) {\n await markDeferredStitchCompleted({\n knex: knex,\n entityRef,\n stitchTicket,\n });\n }\n }\n}\n"],"names":["isDatabaseConflictError","ENTITY_STATUS_CATALOG_PROCESSING_TYPE","ANNOTATION_VIEW_URL","ANNOTATION_EDIT_URL","generateStableHash","buildEntitySearch","syncSearchRows","markDeferredStitchCompleted"],"mappings":";;;;;;;;;;AAyCA,MAAM,qBAAA;AAAA;AAAA,EAEJ;AAAA,CAAA;AAOF,eAAsB,iBAAiB,OAAA,EAMY;AACjD,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,SAAA,EAAU,GAAI,OAAA;AACpC,EAAA,MAAM,eAAe,OAAA,CAAQ,YAAA;AAK7B,EAAA,IAAI,iCAAA,GAAoC,OAAA,CAAQ,QAAA,CAAS,IAAA,KAAS,UAAA;AAElE,EAAA,IAAI;AACF,IAAA,MAAM,YAAA,GAAe,MAAM,IAAA,CAAwB,eAAe,EAC/D,KAAA,CAAM,EAAE,UAAA,EAAY,SAAA,EAAW,CAAA,CAC/B,KAAA,CAAM,CAAC,CAAA,CACP,OAAO,WAAW,CAAA;AACrB,IAAA,IAAI,CAAC,aAAa,MAAA,EAAQ;AAExB,MAAA,OAAO,WAAA;AAAA,IACT;AAGA,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAyB,gBAAgB,CAAA,CAC5C,MAAA,CAAO;AAAA,QACN,SAAA,EAAW,YAAA,CAAa,CAAC,CAAA,CAAE,SAAA;AAAA,QAC3B,IAAA,EAAM,EAAA;AAAA,QACN,UAAA,EAAY;AAAA,OACb,CAAA,CACA,UAAA,CAAW,WAAW,EACtB,MAAA,EAAO;AAAA,IACZ,SAAS,KAAA,EAAO;AAMd,MAAA,IAAIA,wCAAA,CAAwB,KAAK,CAAA,EAAG;AAClC,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,sBAAA,EAAyB,SAAS,CAAA,UAAA,CAAA,EAAc,KAAK,CAAA;AAClE,QAAA,OAAO,WAAA;AAAA,MACT;AAEA,MAAA,MAAM,KAAA;AAAA,IACR;AAMA,IAAA,MAAM,CAAC,eAAA,EAAiB,eAAe,CAAA,GAAI,MAAM,QAAQ,GAAA,CAAI;AAAA,MAC3D,IAAA,CACG,IAAA,CAAK,qBAAA,EAAuB,SAAS,mBAAmB,OAAA,EAAS;AAChE,QAAA,OAAO,OAAA,CACJ,IAAA,CAAK,0BAA0B,CAAA,CAC/B,MAAM,EAAE,iBAAA,EAAmB,SAAA,EAAW,CAAA,CACtC,KAAA,CAAM,EAAE,KAAA,EAAO,KAAK,CAAA;AAAA,MACzB,CAAC,EACA,MAAA,CAAO;AAAA,QACN,QAAA,EAAU,yBAAA;AAAA,QACV,eAAA,EAAiB,gCAAA;AAAA,QACjB,MAAA,EAAQ,sBAAA;AAAA,QACR,sBAAA,EAAwB,2BAAA;AAAA,QACxB,YAAA,EAAc;AAAA,OACf,CAAA,CACA,IAAA,CAAK,eAAe,CAAA,CACpB,KAAA,CAAM,EAAE,0BAAA,EAA4B,SAAA,EAAW,CAAA,CAC/C,UAAU,IAAA,CAAK,GAAA,CAAI,qBAAqB,CAAC,CAAA,CACzC,cAAc,gBAAA,EAAkB;AAAA,QAC/B,0BAAA,EAA4B;AAAA,OAC7B,CAAA;AAAA
,MACH,KACG,QAAA,CAAS;AAAA,QACR,YAAA,EAAc,MAAA;AAAA,QACd,cAAA,EAAgB;AAAA,OACjB,CAAA,CACA,IAAA,CAAK,WAAW,CAAA,CAChB,MAAM,EAAE,iBAAA,EAAmB,SAAA,EAAW,EACtC,OAAA,CAAQ,cAAA,EAAgB,KAAK,CAAA,CAC7B,OAAA,CAAQ,kBAAkB,KAAK;AAAA,KACnC,CAAA;AAMD,IAAA,IAAI,CAAC,gBAAgB,MAAA,EAAQ;AAC3B,MAAA,MAAA,CAAO,KAAA;AAAA,QACL,oBAAoB,SAAS,CAAA,4CAAA;AAAA,OAC/B;AACA,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAM;AAAA,MACJ,QAAA;AAAA,MACA,eAAA;AAAA,MACA,MAAA;AAAA,MACA,sBAAA;AAAA,MACA;AAAA,KACF,GAAI,gBAAgB,CAAC,CAAA;AAMrB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,MAAA,CAAO,KAAA;AAAA,QACL,oBAAoB,SAAS,CAAA,uCAAA;AAAA,OAC/B;AACA,MAAA,OAAO,WAAA;AAAA,IACT;AAIA,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,eAAe,CAAA;AACzC,IAAA,MAAM,QAAA,GAAW,MAAA,CAAO,sBAAsB,CAAA,KAAM,CAAA;AACpD,IAAA,IAAI,cAAkC,EAAC;AAEvC,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,SAAS,CAAA,aAAA,CAAe,CAAA;AACxC,MAAA,MAAA,CAAO,SAAS,WAAA,GAAc;AAAA,QAC5B,GAAG,OAAO,QAAA,CAAS,WAAA;AAAA,QACnB,CAAC,qBAAqB,GAAG;AAAA,OAC3B;AAAA,IACF;AACA,IAAA,IAAI,MAAA,EAAQ;AACV,MAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,CAAM,MAAM,CAAA;AACtC,MAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,YAAY,CAAA,IAAK,aAAa,MAAA,EAAQ;AACtD,QAAA,WAAA,GAAc,YAAA,CAAa,IAAI,CAAA,CAAA,MAAM;AAAA,UACnC,IAAA,EAAMC,mDAAA;AAAA,UACN,KAAA,EAAO,OAAA;AAAA,UACP,SAAS,CAAA,EAAG,CAAA,CAAE,IAAI,CAAA,EAAA,EAAK,EAAE,OAAO,CAAA,CAAA;AAAA,UAChC,KAAA,EAAO;AAAA,SACT,CAAE,CAAA;AAAA,MACJ;AAAA,IACF;AAEA,IAAA,KAAA,MAAW,UAAA,IAAc,CAACC,gCAAA,EAAqBC,gCAAmB,CAAA,EAAG;AACnE,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,QAAA,CAAS,WAAA,GAAc,UAAU,CAAA;AACtD,MAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,qBAAA,CAAsB,IAAA,CAAK,KAAK,CAAA,EAAG;AAClE,QAAA,MAAA,CAAO,QAAA,CAAS,WAAA,CAAa,UAAU,CAAA,GACrC,+DAAA;AAAA,MACJ;AAAA,IACF;AAIA,IAAA,MAAA,CAAO,YAAY,eAAA,CAChB,MAAA;AAAA,MAAO,SAAO,GAAA,CAAI;AAAA;AAAA,KAAgD,CAClE,IAAoB,CAAA,GAAA,MAAQ;AAAA,MAC3B,MAAM,GAAA,CAAI,YAAA;AAAA,MACV,WAAW,GAAA,CAAI;AAAA,KACjB,CAAE,CAAA;AACJ,IAAA,IAAI,YAAY,MAAA,EAAQ;AACtB,MAAA,MAAA,CAAO,MAAA,GAAS;AAAA,QACd,GAAG,MAAA,CAAO,MAAA;AAAA,QACV,KAAA,EAAO,CAAC,GAAI,MAAA,CAAO,QAAQ,KAAA,IAAS,EAAC,EAAI,GAAG,WAAW;AAA
A,OACzD;AAAA,IACF;AAGA,IAAA,MAAM,IAAA,GAAOC,wBAAmB,MAAM,CAAA;AACtC,IAAA,IAAI,SAAS,YAAA,EAAc;AACzB,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,qBAAA,EAAwB,SAAS,CAAA,YAAA,CAAc,CAAA;AAC5D,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAA,CAAO,SAAS,GAAA,GAAM,QAAA;AACtB,IAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,IAAA,EAAM;AAGzB,MAAA,MAAA,CAAO,SAAS,IAAA,GAAO,IAAA;AAAA,IACzB;AAKA,IAAA,MAAM,aAAA,GAAgBC,mCAAA,CAAkB,QAAA,EAAU,MAAM,CAAA;AAExD,IAAA,IAAI,WAAA,GAAc,IAAA,CAAyB,gBAAgB,CAAA,CACxD,MAAA,CAAO;AAAA,MACN,YAAA,EAAc,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AAAA,MACnC,IAAA;AAAA,MACA,eAAA,EAAiB,IAAA,CAAK,EAAA,CAAG,GAAA;AAAI,KAC9B,CAAA,CACA,KAAA,CAAM,WAAA,EAAa,QAAQ,CAAA;AAI9B,IAAA,IAAI,OAAA,CAAQ,QAAA,CAAS,IAAA,KAAS,UAAA,IAAc,YAAA,EAAc;AACxD,MAAA,WAAA,GAAc,WAAA,CAAY,WAAA;AAAA,QACxB,IAAA,CAAuB,cAAc,CAAA,CAClC,KAAA,CAAM,2BAA2B,SAAS,CAAA,CAC1C,KAAA,CAAM,4BAAA,EAA8B,YAAY,CAAA,CAChD,MAAA,CAAO,IAAA,CAAK,GAAA,CAAI,GAAG,CAAC;AAAA,OACzB;AAAA,IACF;AAEA,IAAA,MAAM,sBAAsB,MAAM,WAAA;AAElC,IAAA,IAAI,wBAAwB,CAAA,EAAG;AAC7B,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,OAAA,EAAU,SAAS,CAAA,qCAAA,CAAuC,CAAA;AACvE,MAAA,OAAO,WAAA;AAAA,IACT;AAEA,IAAA,MAAMC,6BAAA,CAAe,IAAA,EAAM,QAAA,EAAU,aAAa,CAAA;AAElD,IAAA,OAAO,SAAA;AAAA,EACT,SAAS,KAAA,EAAO;AACd,IAAA,iCAAA,GAAoC,KAAA;AACpC,IAAA,MAAM,KAAA;AAAA,EACR,CAAA,SAAE;AACA,IAAA,IAAI,qCAAqC,YAAA,EAAc;AACrD,MAAA,MAAMC,uDAAA,CAA4B;AAAA,QAChC,IAAA;AAAA,QACA,SAAA;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;;;;"}
@@ -0,0 +1,103 @@
1
+ 'use strict';
2
+
3
+ var util = require('./util.cjs.js');
4
+
5
// The Postgres sync compares nullable columns via COALESCE(x, chr(1)) so
// the planner can include them in a hash anti-join. \u0001 (SOH, the same
// character chr(1) produces) therefore acts as the NULL sentinel; it does
// not occur in real entity metadata. Written as an explicit escape rather
// than a raw control character so the otherwise-invisible byte cannot be
// silently dropped by editors or tooling (a raw byte that is lost turns
// this into "" and would wrongly filter out empty-string values).
const NULL_SENTINEL = "\u0001";

// Drops entries whose value or original_value equals the sentinel — on
// Postgres such rows would be indistinguishable from NULL in the merge
// comparisons, so they are excluded from the searchable set up front.
function filterSentinelValues(entries) {
  return entries.filter(
    (r) => r.value !== NULL_SENTINEL && r.original_value !== NULL_SENTINEL
  );
}
11
// Synchronizes the search table rows for a given entity, dispatching to a
// database-specific strategy: a writable CTE on Postgres, a temp-table
// merge on MySQL flavors, and a plain delete-and-reinsert elsewhere
// (e.g. SQLite).
async function syncSearchRows(knex, entityId, searchEntries) {
  const clientName = knex.client.config.client;
  if (clientName === "pg") {
    return syncPostgres(knex, entityId, searchEntries);
  }
  if (clientName.includes("mysql")) {
    return syncMysql(knex, entityId, searchEntries);
  }
  return syncBulkReplace(knex, entityId, searchEntries);
}
21
// Postgres strategy: one writable-CTE statement performs the whole merge in
// a single round-trip. The `desired` CTE unnests the wanted rows from three
// parallel arrays, the `deleted` CTE removes this entity's rows that are no
// longer desired, and the final INSERT adds desired rows not already
// present. All branches of the statement see the same pre-modification
// snapshot, so the DELETE and INSERT do not interfere; being one statement,
// it is atomic without an explicit transaction wrapper.
//
// Nullable columns are compared via COALESCE(x, chr(1)) rather than
// IS NOT DISTINCT FROM so all three columns can appear in the anti-join's
// hash condition. chr(1) (SOH) is the NULL sentinel; rows containing that
// literal character are removed up front by filterSentinelValues, since
// they would otherwise compare equal to NULL.
async function syncPostgres(knex, entityId, searchEntries) {
  const filtered = filterSentinelValues(searchEntries);
  // Column-wise arrays feeding the three unnest() parameters below; the
  // arrays are index-aligned, so unnest zips them back into rows.
  const keys = filtered.map((r) => r.key);
  const values = filtered.map((r) => r.value);
  const originalValues = filtered.map((r) => r.original_value);
  await knex.raw(
    `
      WITH desired AS (
        SELECT *
        FROM unnest(?::text[], ?::text[], ?::text[])
          AS d(key, value, original_value)
      ),
      deleted AS (
        DELETE FROM "search" s
        WHERE s.entity_id = ?
          AND NOT EXISTS (
            SELECT 1 FROM desired d
            WHERE d.key = s.key
              AND COALESCE(d.value, chr(1)) = COALESCE(s.value, chr(1))
              AND COALESCE(d.original_value, chr(1)) = COALESCE(s.original_value, chr(1))
          )
      )
      INSERT INTO "search" (entity_id, key, value, original_value)
      SELECT ?, d.key, d.value, d.original_value
      FROM desired d
      WHERE NOT EXISTS (
        SELECT 1 FROM "search" s
        WHERE s.entity_id = ?
          AND s.key = d.key
          AND COALESCE(s.value, chr(1)) = COALESCE(d.value, chr(1))
          AND COALESCE(s.original_value, chr(1)) = COALESCE(d.original_value, chr(1))
      )
    `,
    // Bindings in order of appearance: the three unnest arrays, then
    // entityId for the DELETE filter, the INSERT's SELECT list, and the
    // INSERT's NOT EXISTS filter.
    [keys, values, originalValues, entityId, entityId, entityId]
  );
}
57
// Upper bound on attempts for the merge transaction when InnoDB reports a
// deadlock; concurrent stitches of different entities can still contend on
// shared index pages via next-key (gap) locks.
const MYSQL_DEADLOCK_MAX_RETRIES = 3;

// MySQL strategy: no data-modifying CTEs are available, so the desired
// state is materialized into a session-scoped temporary table and merged
// into `search` with a DELETE + INSERT pair inside one transaction. The
// whole operation is idempotent, which is what makes the deadlock retry
// below safe.
async function syncMysql(knex, entityId, searchEntries) {
  for (let attempt = 1; ; attempt++) {
    try {
      await knex.transaction(async (trx) => {
        // Create the temp table inside the transaction so it is guaranteed
        // to live on the same pooled connection as the merge queries.
        // CREATE TEMPORARY TABLE does not cause an implicit commit in
        // MySQL (unlike regular DDL), so this is transaction-safe.
        await trx.raw(
          "CREATE TEMPORARY TABLE IF NOT EXISTS `_desired_search` (`key` VARCHAR(255) NOT NULL, `value` VARCHAR(255) NULL, `original_value` VARCHAR(255) NULL)"
        );
        // Clear stale rows from any previous call on this connection.
        // DELETE (DML) is used instead of TRUNCATE (DDL) to avoid an
        // implicit commit that would break atomicity.
        await trx.raw("DELETE FROM `_desired_search`");
        if (searchEntries.length > 0) {
          await trx.batchInsert(
            "_desired_search",
            // Project to exactly the temp table's columns.
            searchEntries.map((r) => ({
              key: r.key,
              value: r.value,
              original_value: r.original_value
            })),
            util.BATCH_SIZE
          );
        }
        // Remove rows no longer in the desired set. <=> is MySQL's
        // NULL-safe equality operator.
        await trx.raw(
          "DELETE s FROM `search` s WHERE s.entity_id = ? AND NOT EXISTS ( SELECT 1 FROM `_desired_search` d WHERE d.`key` = s.`key` AND d.`value` <=> s.`value` AND BINARY d.`original_value` <=> BINARY s.`original_value`)",
          [entityId]
        );
        // Insert rows that are new in the desired set. original_value
        // preserves original casing, so it is compared with BINARY to keep
        // the default case-insensitive collation from treating
        // differently-cased values as equal.
        await trx.raw(
          "INSERT INTO `search` (entity_id, `key`, `value`, `original_value`) SELECT ?, d.`key`, d.`value`, d.`original_value` FROM `_desired_search` d WHERE NOT EXISTS ( SELECT 1 FROM `search` s WHERE s.entity_id = ? AND s.`key` = d.`key` AND s.`value` <=> d.`value` AND BINARY s.`original_value` <=> BINARY d.`original_value`)",
          [entityId, entityId]
        );
      });
      return;
    } catch (error) {
      // MySQL errno 1213 = ER_LOCK_DEADLOCK: retry up to the cap; anything
      // else (or exhausted retries) propagates to the caller.
      if (error?.errno === 1213 && attempt < MYSQL_DEADLOCK_MAX_RETRIES) {
        continue;
      }
      throw error;
    }
  }
}
95
// Fallback strategy (SQLite and anything unrecognized): atomically replace
// all of the entity's search rows — delete everything for the entity, then
// re-insert the desired set in batches — inside a single transaction.
async function syncBulkReplace(knex, entityId, searchEntries) {
  const replaceAllRows = async (tx) => {
    await tx("search").where({ entity_id: entityId }).delete();
    await tx.batchInsert("search", searchEntries, util.BATCH_SIZE);
  };
  await knex.transaction(replaceAllRows);
}
101
+
102
+ exports.syncSearchRows = syncSearchRows;
103
+ //# sourceMappingURL=syncSearchRows.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"syncSearchRows.cjs.js","sources":["../../../../src/database/operations/stitcher/syncSearchRows.ts"],"sourcesContent":["/*\n * Copyright 2026 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Knex } from 'knex';\nimport { DbSearchRow } from '../../tables';\nimport { BATCH_SIZE } from './util';\n\n// The Postgres sync uses COALESCE(x, NULL_SENTINEL) to allow Postgres to\n// include nullable columns in the Hash Cond of anti-joins (IS NOT DISTINCT\n// FROM prevents this). As a consequence, values that are exactly this\n// sentinel character are not searchable — they would be treated as NULL.\n// This is the SOH (Start of Heading) control character which does not\n// appear in real entity metadata.\nconst NULL_SENTINEL = '\\x01';\n\nfunction filterSentinelValues(entries: DbSearchRow[]): DbSearchRow[] {\n return entries.filter(\n r => r.value !== NULL_SENTINEL && r.original_value !== NULL_SENTINEL,\n );\n}\n\n/**\n * Synchronizes the search table rows for a given entity, applying only the\n * minimal set of changes needed. 
Rows that already exist with the correct\n * values are left untouched, new rows are inserted, and stale rows are\n * deleted — minimizing write churn, dead tuples, and WAL traffic.\n *\n * Uses database-specific strategies:\n * - Postgres: Single writable CTE with unnest (one round-trip, no DDL)\n * - MySQL: Temporary table merge (two queries in a transaction)\n * - SQLite: Simple bulk replace (sufficient for dev/test)\n */\nexport async function syncSearchRows(\n knex: Knex | Knex.Transaction,\n entityId: string,\n searchEntries: DbSearchRow[],\n): Promise<void> {\n const client = knex.client.config.client;\n\n if (client === 'pg') {\n await syncPostgres(knex, entityId, searchEntries);\n } else if (client.includes('mysql')) {\n await syncMysql(knex, entityId, searchEntries);\n } else {\n await syncBulkReplace(knex, entityId, searchEntries);\n }\n}\n\n// ---------------------------------------------------------------------------\n// Postgres: writable CTE + unnest\n//\n// All CTE branches see the same pre-modification snapshot, so the DELETE\n// and INSERT do not interfere with each other. This is a single atomic\n// statement — no explicit transaction wrapper needed.\n//\n// Nullable columns use COALESCE(x, chr(1)) instead of IS NOT DISTINCT FROM\n// so that Postgres can include all three columns in the Hash Cond of the\n// anti-join, rather than pushing nullable comparisons into a Join Filter\n// that degrades to O(n*m) when many rows share the same key. 
chr(1) (SOH\n// control character) is used as the NULL sentinel — it cannot appear in\n// real entity values since they are human-readable strings.\n// ---------------------------------------------------------------------------\nasync function syncPostgres(\n knex: Knex | Knex.Transaction,\n entityId: string,\n searchEntries: DbSearchRow[],\n): Promise<void> {\n const filtered = filterSentinelValues(searchEntries);\n const keys = filtered.map(r => r.key);\n const values = filtered.map(r => r.value);\n const originalValues = filtered.map(r => r.original_value);\n\n await knex.raw(\n `\n WITH desired AS (\n SELECT *\n FROM unnest(?::text[], ?::text[], ?::text[])\n AS d(key, value, original_value)\n ),\n deleted AS (\n DELETE FROM \"search\" s\n WHERE s.entity_id = ?\n AND NOT EXISTS (\n SELECT 1 FROM desired d\n WHERE d.key = s.key\n AND COALESCE(d.value, chr(1)) = COALESCE(s.value, chr(1))\n AND COALESCE(d.original_value, chr(1)) = COALESCE(s.original_value, chr(1))\n )\n )\n INSERT INTO \"search\" (entity_id, key, value, original_value)\n SELECT ?, d.key, d.value, d.original_value\n FROM desired d\n WHERE NOT EXISTS (\n SELECT 1 FROM \"search\" s\n WHERE s.entity_id = ?\n AND s.key = d.key\n AND COALESCE(s.value, chr(1)) = COALESCE(d.value, chr(1))\n AND COALESCE(s.original_value, chr(1)) = COALESCE(d.original_value, chr(1))\n )\n `,\n [keys, values, originalValues, entityId, entityId, entityId],\n );\n}\n\n// ---------------------------------------------------------------------------\n// MySQL: temporary table merge with deadlock retry\n//\n// MySQL does not support data-modifying CTEs, so we materialize the desired\n// state into a session-scoped temporary table and then merge it into the\n// real table with two queries. 
The temp table is created inside the\n// transaction to guarantee it exists on the same pooled connection.\n// CREATE/DROP TEMPORARY TABLE does not cause an implicit commit in MySQL\n// (unlike regular DDL), so this is transaction-safe.\n//\n// InnoDB's next-key (gap) locking can cause deadlocks between concurrent\n// transactions operating on different entity_ids when their gap locks\n// overlap on shared index pages. We retry on deadlock (error 1213) since\n// the operation is idempotent.\n// ---------------------------------------------------------------------------\nconst MYSQL_DEADLOCK_MAX_RETRIES = 3;\n\nasync function syncMysql(\n knex: Knex | Knex.Transaction,\n entityId: string,\n searchEntries: DbSearchRow[],\n): Promise<void> {\n for (let attempt = 1; ; attempt++) {\n try {\n await knex.transaction(async trx => {\n // Create the temp table inside the transaction so it's guaranteed\n // to be on the same pooled connection as the merge queries.\n // CREATE TEMPORARY TABLE does not cause an implicit commit in\n // MySQL (unlike regular CREATE TABLE), so this is safe.\n await trx.raw(\n 'CREATE TEMPORARY TABLE IF NOT EXISTS `_desired_search` (' +\n '`key` VARCHAR(255) NOT NULL, ' +\n '`value` VARCHAR(255) NULL, ' +\n '`original_value` VARCHAR(255) NULL' +\n ')',\n );\n // Clear stale data from any previous call on this connection.\n // Uses DELETE (DML) instead of TRUNCATE (DDL) to avoid an\n // implicit commit that would break transaction atomicity.\n await trx.raw('DELETE FROM `_desired_search`');\n\n if (searchEntries.length > 0) {\n await trx.batchInsert(\n '_desired_search',\n searchEntries.map(r => ({\n key: r.key,\n value: r.value,\n original_value: r.original_value,\n })),\n BATCH_SIZE,\n );\n }\n\n // Delete rows that are no longer in the desired set\n await trx.raw(\n 'DELETE s FROM `search` s ' +\n 'WHERE s.entity_id = ? 
' +\n 'AND NOT EXISTS (' +\n ' SELECT 1 FROM `_desired_search` d' +\n ' WHERE d.`key` = s.`key`' +\n ' AND d.`value` <=> s.`value`' +\n ' AND BINARY d.`original_value` <=> BINARY s.`original_value`' +\n ')',\n [entityId],\n );\n\n // Insert rows that are new in the desired set. The original_value\n // column preserves the original casing and must be compared with\n // BINARY to avoid MySQL's default case-insensitive collation\n // treating e.g. \"Team-A\" and \"team-a\" as equal.\n await trx.raw(\n 'INSERT INTO `search` (entity_id, `key`, `value`, `original_value`) ' +\n 'SELECT ?, d.`key`, d.`value`, d.`original_value` ' +\n 'FROM `_desired_search` d ' +\n 'WHERE NOT EXISTS (' +\n ' SELECT 1 FROM `search` s' +\n ' WHERE s.entity_id = ?' +\n ' AND s.`key` = d.`key`' +\n ' AND s.`value` <=> d.`value`' +\n ' AND BINARY s.`original_value` <=> BINARY d.`original_value`' +\n ')',\n [entityId, entityId],\n );\n });\n return;\n } catch (error) {\n // MySQL error 1213: ER_LOCK_DEADLOCK\n if (\n (error as any)?.errno === 1213 &&\n attempt < MYSQL_DEADLOCK_MAX_RETRIES\n ) {\n continue;\n }\n throw error;\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// SQLite (and fallback): bulk replace\n// ---------------------------------------------------------------------------\nasync function syncBulkReplace(\n knex: Knex | Knex.Transaction,\n entityId: string,\n searchEntries: DbSearchRow[],\n): Promise<void> {\n await knex.transaction(async trx => {\n await trx<DbSearchRow>('search').where({ entity_id: entityId }).delete();\n await trx.batchInsert('search', searchEntries, BATCH_SIZE);\n 
});\n}\n"],"names":["BATCH_SIZE"],"mappings":";;;;AA0BA,MAAM,aAAA,GAAgB,GAAA;AAEtB,SAAS,qBAAqB,OAAA,EAAuC;AACnE,EAAA,OAAO,OAAA,CAAQ,MAAA;AAAA,IACb,CAAA,CAAA,KAAK,CAAA,CAAE,KAAA,KAAU,aAAA,IAAiB,EAAE,cAAA,KAAmB;AAAA,GACzD;AACF;AAaA,eAAsB,cAAA,CACpB,IAAA,EACA,QAAA,EACA,aAAA,EACe;AACf,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,MAAA,CAAO,MAAA,CAAO,MAAA;AAElC,EAAA,IAAI,WAAW,IAAA,EAAM;AACnB,IAAA,MAAM,YAAA,CAAa,IAAA,EAAM,QAAA,EAAU,aAAa,CAAA;AAAA,EAClD,CAAA,MAAA,IAAW,MAAA,CAAO,QAAA,CAAS,OAAO,CAAA,EAAG;AACnC,IAAA,MAAM,SAAA,CAAU,IAAA,EAAM,QAAA,EAAU,aAAa,CAAA;AAAA,EAC/C,CAAA,MAAO;AACL,IAAA,MAAM,eAAA,CAAgB,IAAA,EAAM,QAAA,EAAU,aAAa,CAAA;AAAA,EACrD;AACF;AAgBA,eAAe,YAAA,CACb,IAAA,EACA,QAAA,EACA,aAAA,EACe;AACf,EAAA,MAAM,QAAA,GAAW,qBAAqB,aAAa,CAAA;AACnD,EAAA,MAAM,IAAA,GAAO,QAAA,CAAS,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,GAAG,CAAA;AACpC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,KAAK,CAAA;AACxC,EAAA,MAAM,cAAA,GAAiB,QAAA,CAAS,GAAA,CAAI,CAAA,CAAA,KAAK,EAAE,cAAc,CAAA;AAEzD,EAAA,MAAM,IAAA,CAAK,GAAA;AAAA,IACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA,CAAA;AAAA,IA2BA,CAAC,IAAA,EAAM,MAAA,EAAQ,cAAA,EAAgB,QAAA,EAAU,UAAU,QAAQ;AAAA,GAC7D;AACF;AAiBA,MAAM,0BAAA,GAA6B,CAAA;AAEnC,eAAe,SAAA,CACb,IAAA,EACA,QAAA,EACA,aAAA,EACe;AACf,EAAA,KAAA,IAAS,OAAA,GAAU,KAAK,OAAA,EAAA,EAAW;AACjC,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,WAAA,CAAY,OAAM,GAAA,KAAO;AAKlC,QAAA,MAAM,GAAA,CAAI,GAAA;AAAA,UACR;AAAA,SAKF;AAIA,QAAA,MAAM,GAAA,CAAI,IAAI,+BAA+B,CAAA;AAE7C,QAAA,IAAI,aAAA,CAAc,SAAS,CAAA,EAAG;AAC5B,UAAA,MAAM,GAAA,CAAI,WAAA;AAAA,YACR,iBAAA;AAAA,YACA,aAAA,CAAc,IAAI,CAAA,CAAA,MAAM;AAAA,cACtB,KAAK,CAAA,CAAE,GAAA;AAAA,cACP,OAAO,CAAA,CAAE,KAAA;AAAA,cACT,gBAAgB,CAAA,CAAE;AAAA,aACpB,CAAE,CAAA;AAAA,YACFA;AAAA,WACF;AAAA,QACF;AAGA,QAAA,MAAM,GAAA,CAAI,GAAA;AAAA,UACR,4NAAA;AAAA,UAQA,CAAC,QAAQ;AAAA,SACX;AAMA,QAAA,MAAM,GAAA,CAAI,GAAA;AAAA,UACR,0UAAA;AAAA,UAUA,CAAC,UAAU,QAAQ;AAAA,SACrB;AAAA,MACF,CAAC,CAAA;AACD,MAAA;AAAA,IACF,SAAS,KAAA,EAAO;AAEd,MAAA,I
ACG,KAAA,EAAe,KAAA,KAAU,IAAA,IAC1B,OAAA,GAAU,0BAAA,EACV;AACA,QAAA;AAAA,MACF;AACA,MAAA,MAAM,KAAA;AAAA,IACR;AAAA,EACF;AACF;AAKA,eAAe,eAAA,CACb,IAAA,EACA,QAAA,EACA,aAAA,EACe;AACf,EAAA,MAAM,IAAA,CAAK,WAAA,CAAY,OAAM,GAAA,KAAO;AAClC,IAAA,MAAM,GAAA,CAAiB,QAAQ,CAAA,CAAE,KAAA,CAAM,EAAE,SAAA,EAAW,QAAA,EAAU,CAAA,CAAE,MAAA,EAAO;AACvE,IAAA,MAAM,GAAA,CAAI,WAAA,CAAY,QAAA,EAAU,aAAA,EAAeA,eAAU,CAAA;AAAA,EAC3D,CAAC,CAAA;AACH;;;;"}
@@ -1,18 +1,15 @@
1
1
  'use strict';
2
2
 
3
3
  var fs = require('fs-extra');
4
- var g = require('glob');
4
+ var glob = require('glob');
5
5
  var path = require('node:path');
6
- var node_util = require('node:util');
7
6
  var pluginCatalogNode = require('@backstage/plugin-catalog-node');
8
7
 
9
8
  function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
10
9
 
11
10
  var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
12
- var g__default = /*#__PURE__*/_interopDefaultCompat(g);
13
11
  var path__default = /*#__PURE__*/_interopDefaultCompat(path);
14
12
 
15
- const glob = node_util.promisify(g__default.default);
16
13
  const LOCATION_TYPE = "file";
17
14
  class FileReaderProcessor {
18
15
  getProcessorName() {
@@ -23,7 +20,9 @@ class FileReaderProcessor {
23
20
  return false;
24
21
  }
25
22
  try {
26
- const fileMatches = await glob(location.target);
23
+ const fileMatches = await glob.glob(location.target, {
24
+ windowsPathsNoEscape: true
25
+ });
27
26
  if (fileMatches.length > 0) {
28
27
  for (const fileMatch of fileMatches) {
29
28
  const data = await fs__default.default.readFile(fileMatch);
@@ -1 +1 @@
1
- {"version":3,"file":"FileReaderProcessor.cjs.js","sources":["../../src/processors/FileReaderProcessor.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport fs from 'fs-extra';\nimport g from 'glob';\nimport path from 'node:path';\nimport { promisify } from 'node:util';\nimport { LocationSpec } from '@backstage/plugin-catalog-common';\nimport {\n CatalogProcessor,\n CatalogProcessorEmit,\n CatalogProcessorParser,\n processingResult,\n} from '@backstage/plugin-catalog-node';\n\nconst glob = promisify(g);\n\nconst LOCATION_TYPE = 'file';\n\n/** @public */\nexport class FileReaderProcessor implements CatalogProcessor {\n getProcessorName(): string {\n return 'FileReaderProcessor';\n }\n\n async readLocation(\n location: LocationSpec,\n optional: boolean,\n emit: CatalogProcessorEmit,\n parser: CatalogProcessorParser,\n ): Promise<boolean> {\n if (location.type !== LOCATION_TYPE) {\n return false;\n }\n\n try {\n const fileMatches = await glob(location.target);\n\n if (fileMatches.length > 0) {\n for (const fileMatch of fileMatches) {\n const data = await fs.readFile(fileMatch);\n const normalizedFilePath = path.normalize(fileMatch);\n\n // The normalize converts to native slashes; the glob library returns\n // forward slashes even on windows\n for await (const parseResult of parser({\n data: data,\n location: {\n type: LOCATION_TYPE,\n target: normalizedFilePath,\n },\n })) {\n 
emit(parseResult);\n emit(\n processingResult.refresh(\n `${LOCATION_TYPE}:${normalizedFilePath}`,\n ),\n );\n }\n }\n } else if (!optional) {\n const message = `${location.type} ${location.target} does not exist`;\n emit(processingResult.notFoundError(location, message));\n }\n } catch (e) {\n const message = `${location.type} ${location.target} could not be read, ${e}`;\n emit(processingResult.generalError(location, message));\n }\n\n return true;\n }\n}\n"],"names":["promisify","g","fs","path","processingResult"],"mappings":";;;;;;;;;;;;;;AA4BA,MAAM,IAAA,GAAOA,oBAAUC,kBAAC,CAAA;AAExB,MAAM,aAAA,GAAgB,MAAA;AAGf,MAAM,mBAAA,CAAgD;AAAA,EAC3D,gBAAA,GAA2B;AACzB,IAAA,OAAO,qBAAA;AAAA,EACT;AAAA,EAEA,MAAM,YAAA,CACJ,QAAA,EACA,QAAA,EACA,MACA,MAAA,EACkB;AAClB,IAAA,IAAI,QAAA,CAAS,SAAS,aAAA,EAAe;AACnC,MAAA,OAAO,KAAA;AAAA,IACT;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,WAAA,GAAc,MAAM,IAAA,CAAK,QAAA,CAAS,MAAM,CAAA;AAE9C,MAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,QAAA,KAAA,MAAW,aAAa,WAAA,EAAa;AACnC,UAAA,MAAM,IAAA,GAAO,MAAMC,mBAAA,CAAG,QAAA,CAAS,SAAS,CAAA;AACxC,UAAA,MAAM,kBAAA,GAAqBC,qBAAA,CAAK,SAAA,CAAU,SAAS,CAAA;AAInD,UAAA,WAAA,MAAiB,eAAe,MAAA,CAAO;AAAA,YACrC,IAAA;AAAA,YACA,QAAA,EAAU;AAAA,cACR,IAAA,EAAM,aAAA;AAAA,cACN,MAAA,EAAQ;AAAA;AACV,WACD,CAAA,EAAG;AACF,YAAA,IAAA,CAAK,WAAW,CAAA;AAChB,YAAA,IAAA;AAAA,cACEC,kCAAA,CAAiB,OAAA;AAAA,gBACf,CAAA,EAAG,aAAa,CAAA,CAAA,EAAI,kBAAkB,CAAA;AAAA;AACxC,aACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAA,MAAA,IAAW,CAAC,QAAA,EAAU;AACpB,QAAA,MAAM,UAAU,CAAA,EAAG,QAAA,CAAS,IAAI,CAAA,CAAA,EAAI,SAAS,MAAM,CAAA,eAAA,CAAA;AACnD,QAAA,IAAA,CAAKA,kCAAA,CAAiB,aAAA,CAAc,QAAA,EAAU,OAAO,CAAC,CAAA;AAAA,MACxD;AAAA,IACF,SAAS,CAAA,EAAG;AACV,MAAA,MAAM,OAAA,GAAU,GAAG,QAAA,CAAS,IAAI,IAAI,QAAA,CAAS,MAAM,uBAAuB,CAAC,CAAA,CAAA;AAC3E,MAAA,IAAA,CAAKA,kCAAA,CAAiB,YAAA,CAAa,QAAA,EAAU,OAAO,CAAC,CAAA;AAAA,IACvD;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;;"}
1
+ {"version":3,"file":"FileReaderProcessor.cjs.js","sources":["../../src/processors/FileReaderProcessor.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport fs from 'fs-extra';\nimport { glob } from 'glob';\nimport path from 'node:path';\nimport { LocationSpec } from '@backstage/plugin-catalog-common';\nimport {\n CatalogProcessor,\n CatalogProcessorEmit,\n CatalogProcessorParser,\n processingResult,\n} from '@backstage/plugin-catalog-node';\n\nconst LOCATION_TYPE = 'file';\n\n/** @public */\nexport class FileReaderProcessor implements CatalogProcessor {\n getProcessorName(): string {\n return 'FileReaderProcessor';\n }\n\n async readLocation(\n location: LocationSpec,\n optional: boolean,\n emit: CatalogProcessorEmit,\n parser: CatalogProcessorParser,\n ): Promise<boolean> {\n if (location.type !== LOCATION_TYPE) {\n return false;\n }\n\n try {\n const fileMatches = await glob(location.target, {\n windowsPathsNoEscape: true,\n });\n\n if (fileMatches.length > 0) {\n for (const fileMatch of fileMatches) {\n const data = await fs.readFile(fileMatch);\n const normalizedFilePath = path.normalize(fileMatch);\n\n // The normalize converts to native slashes; the glob library returns\n // forward slashes even on windows\n for await (const parseResult of parser({\n data: data,\n location: {\n type: LOCATION_TYPE,\n target: normalizedFilePath,\n },\n })) {\n emit(parseResult);\n 
emit(\n processingResult.refresh(\n `${LOCATION_TYPE}:${normalizedFilePath}`,\n ),\n );\n }\n }\n } else if (!optional) {\n const message = `${location.type} ${location.target} does not exist`;\n emit(processingResult.notFoundError(location, message));\n }\n } catch (e) {\n const message = `${location.type} ${location.target} could not be read, ${e}`;\n emit(processingResult.generalError(location, message));\n }\n\n return true;\n }\n}\n"],"names":["glob","fs","path","processingResult"],"mappings":";;;;;;;;;;;;AA2BA,MAAM,aAAA,GAAgB,MAAA;AAGf,MAAM,mBAAA,CAAgD;AAAA,EAC3D,gBAAA,GAA2B;AACzB,IAAA,OAAO,qBAAA;AAAA,EACT;AAAA,EAEA,MAAM,YAAA,CACJ,QAAA,EACA,QAAA,EACA,MACA,MAAA,EACkB;AAClB,IAAA,IAAI,QAAA,CAAS,SAAS,aAAA,EAAe;AACnC,MAAA,OAAO,KAAA;AAAA,IACT;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,WAAA,GAAc,MAAMA,SAAA,CAAK,QAAA,CAAS,MAAA,EAAQ;AAAA,QAC9C,oBAAA,EAAsB;AAAA,OACvB,CAAA;AAED,MAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,QAAA,KAAA,MAAW,aAAa,WAAA,EAAa;AACnC,UAAA,MAAM,IAAA,GAAO,MAAMC,mBAAA,CAAG,QAAA,CAAS,SAAS,CAAA;AACxC,UAAA,MAAM,kBAAA,GAAqBC,qBAAA,CAAK,SAAA,CAAU,SAAS,CAAA;AAInD,UAAA,WAAA,MAAiB,eAAe,MAAA,CAAO;AAAA,YACrC,IAAA;AAAA,YACA,QAAA,EAAU;AAAA,cACR,IAAA,EAAM,aAAA;AAAA,cACN,MAAA,EAAQ;AAAA;AACV,WACD,CAAA,EAAG;AACF,YAAA,IAAA,CAAK,WAAW,CAAA;AAChB,YAAA,IAAA;AAAA,cACEC,kCAAA,CAAiB,OAAA;AAAA,gBACf,CAAA,EAAG,aAAa,CAAA,CAAA,EAAI,kBAAkB,CAAA;AAAA;AACxC,aACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAA,MAAA,IAAW,CAAC,QAAA,EAAU;AACpB,QAAA,MAAM,UAAU,CAAA,EAAG,QAAA,CAAS,IAAI,CAAA,CAAA,EAAI,SAAS,MAAM,CAAA,eAAA,CAAA;AACnD,QAAA,IAAA,CAAKA,kCAAA,CAAiB,aAAA,CAAc,QAAA,EAAU,OAAO,CAAC,CAAA;AAAA,MACxD;AAAA,IACF,SAAS,CAAA,EAAG;AACV,MAAA,MAAM,OAAA,GAAU,GAAG,QAAA,CAAS,IAAI,IAAI,QAAA,CAAS,MAAM,uBAAuB,CAAC,CAAA,CAAA;AAC3E,MAAA,IAAA,CAAKA,kCAAA,CAAiB,YAAA,CAAa,QAAA,EAAU,OAAO,CAAC,CAAA;AAAA,IACvD;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;;"}
@@ -1,6 +1,7 @@
1
1
  'use strict';
2
2
 
3
3
  var applyPredicateEntityFilterToQuery = require('./applyPredicateEntityFilterToQuery.cjs.js');
4
+ var searchSubquery = require('./searchSubquery.cjs.js');
4
5
 
5
6
  function isEntitiesSearchFilter(filter) {
6
7
  return filter.hasOwnProperty("key");
@@ -11,9 +12,9 @@ function isOrEntityFilter(filter) {
11
12
  function isNegationEntityFilter(filter) {
12
13
  return filter.hasOwnProperty("not");
13
14
  }
14
- function applyInStrategy(filter, targetQuery, onEntityIdField, knex, negate) {
15
+ function applyExistsStrategy(filter, targetQuery, onEntityIdField, knex, negate) {
15
16
  if (isNegationEntityFilter(filter)) {
16
- return applyInStrategy(
17
+ return applyExistsStrategy(
17
18
  filter.not,
18
19
  targetQuery,
19
20
  onEntityIdField,
@@ -24,31 +25,39 @@ function applyInStrategy(filter, targetQuery, onEntityIdField, knex, negate) {
24
25
  if (isEntitiesSearchFilter(filter)) {
25
26
  const key = filter.key.toLowerCase();
26
27
  const values = filter.values?.map((v) => v.toLowerCase());
27
- const matchQuery = knex("search").select("search.entity_id").where({ key }).andWhere(function keyFilter() {
28
+ const subquery = searchSubquery.searchExists(knex, onEntityIdField).where(`${searchSubquery.SEARCH_FLT_ALIAS}.key`, key).andWhere(function keyFilter() {
28
29
  if (values?.length === 1) {
29
- this.where({ value: values.at(0) });
30
+ this.where(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, values.at(0));
30
31
  } else if (values) {
31
- this.andWhere("value", "in", values);
32
+ this.whereIn(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, values);
32
33
  }
33
34
  });
34
- return targetQuery.andWhere(
35
- onEntityIdField,
36
- negate ? "not in" : "in",
37
- matchQuery
38
- );
35
+ return negate ? targetQuery.whereNotExists(subquery) : targetQuery.whereExists(subquery);
39
36
  }
40
37
  return targetQuery[negate ? "andWhereNot" : "andWhere"](
41
38
  function filterFunction() {
42
39
  if (isOrEntityFilter(filter)) {
43
40
  for (const subFilter of filter.anyOf ?? []) {
44
41
  this.orWhere(
45
- (subQuery) => applyInStrategy(subFilter, subQuery, onEntityIdField, knex, false)
42
+ (subQuery) => applyExistsStrategy(
43
+ subFilter,
44
+ subQuery,
45
+ onEntityIdField,
46
+ knex,
47
+ false
48
+ )
46
49
  );
47
50
  }
48
51
  } else {
49
52
  for (const subFilter of filter.allOf ?? []) {
50
53
  this.andWhere(
51
- (subQuery) => applyInStrategy(subFilter, subQuery, onEntityIdField, knex, false)
54
+ (subQuery) => applyExistsStrategy(
55
+ subFilter,
56
+ subQuery,
57
+ onEntityIdField,
58
+ knex,
59
+ false
60
+ )
52
61
  );
53
62
  }
54
63
  }
@@ -59,7 +68,7 @@ function applyEntityFilterToQuery(options) {
59
68
  const { filter, query, targetQuery, onEntityIdField, knex } = options;
60
69
  let result = targetQuery;
61
70
  if (filter) {
62
- result = applyInStrategy(filter, result, onEntityIdField, knex, false);
71
+ result = applyExistsStrategy(filter, result, onEntityIdField, knex, false);
63
72
  }
64
73
  if (query) {
65
74
  result = applyPredicateEntityFilterToQuery.applyPredicateEntityFilterToQuery({
@@ -1 +1 @@
1
- {"version":3,"file":"applyEntityFilterToQuery.cjs.js","sources":["../../../src/service/request/applyEntityFilterToQuery.ts"],"sourcesContent":["/*\n * Copyright 2024 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n EntitiesSearchFilter,\n EntityFilter,\n} from '@backstage/plugin-catalog-node';\nimport { FilterPredicate } from '@backstage/filter-predicates';\nimport { Knex } from 'knex';\nimport { DbSearchRow } from '../../database/tables';\nimport { applyPredicateEntityFilterToQuery } from './applyPredicateEntityFilterToQuery';\n\nfunction isEntitiesSearchFilter(\n filter: EntitiesSearchFilter | EntityFilter,\n): filter is EntitiesSearchFilter {\n return filter.hasOwnProperty('key');\n}\n\nfunction isOrEntityFilter(\n filter: EntityFilter,\n): filter is { anyOf: EntityFilter[] } {\n return filter.hasOwnProperty('anyOf');\n}\n\nfunction isNegationEntityFilter(\n filter: EntityFilter,\n): filter is { not: EntityFilter } {\n return filter.hasOwnProperty('not');\n}\n\n/**\n * Applies filtering through a number of WHERE IN subqueries. 
Example:\n *\n * ```\n * SELECT * FROM final_entities\n * WHERE\n * entity_id IN (\n * SELECT entity_id FROM search\n * WHERE key = 'kind' AND value = 'component'\n * )\n * AND entity_id IN (\n * SELECT entity_id FROM search\n * WHERE key = 'spec.lifecycle' AND value = 'production'\n * )\n * AND final_entities.final_entity IS NOT NULL\n * ```\n *\n * This strategy is a good all-rounder, in the sense that it has medium-good\n * performance on most queries on all database engines. However, it does not\n * scale well down to very short runtimes as well as the JOIN strategy.\n */\nfunction applyInStrategy(\n filter: EntityFilter,\n targetQuery: Knex.QueryBuilder,\n onEntityIdField: string,\n knex: Knex,\n negate: boolean,\n): Knex.QueryBuilder {\n if (isNegationEntityFilter(filter)) {\n return applyInStrategy(\n filter.not,\n targetQuery,\n onEntityIdField,\n knex,\n !negate,\n );\n }\n\n if (isEntitiesSearchFilter(filter)) {\n const key = filter.key.toLowerCase();\n const values = filter.values?.map(v => v.toLowerCase());\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({ key })\n .andWhere(function keyFilter() {\n if (values?.length === 1) {\n this.where({ value: values.at(0) });\n } else if (values) {\n this.andWhere('value', 'in', values);\n }\n });\n return targetQuery.andWhere(\n onEntityIdField,\n negate ? 'not in' : 'in',\n matchQuery,\n );\n }\n\n return targetQuery[negate ? 'andWhereNot' : 'andWhere'](\n function filterFunction() {\n if (isOrEntityFilter(filter)) {\n for (const subFilter of filter.anyOf ?? []) {\n this.orWhere(subQuery =>\n applyInStrategy(subFilter, subQuery, onEntityIdField, knex, false),\n );\n }\n } else {\n for (const subFilter of filter.allOf ?? 
[]) {\n this.andWhere(subQuery =>\n applyInStrategy(subFilter, subQuery, onEntityIdField, knex, false),\n );\n }\n }\n },\n );\n}\n\n// The actual exported function\nexport function applyEntityFilterToQuery(options: {\n filter?: EntityFilter;\n query?: FilterPredicate;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n strategy?: 'in' | 'join';\n}): Knex.QueryBuilder {\n const { filter, query, targetQuery, onEntityIdField, knex } = options;\n\n let result = targetQuery;\n\n if (filter) {\n result = applyInStrategy(filter, result, onEntityIdField, knex, false);\n }\n\n if (query) {\n result = applyPredicateEntityFilterToQuery({\n filter: query,\n targetQuery: result,\n onEntityIdField,\n knex,\n });\n }\n\n return result;\n}\n"],"names":["applyPredicateEntityFilterToQuery"],"mappings":";;;;AAyBA,SAAS,uBACP,MAAA,EACgC;AAChC,EAAA,OAAO,MAAA,CAAO,eAAe,KAAK,CAAA;AACpC;AAEA,SAAS,iBACP,MAAA,EACqC;AACrC,EAAA,OAAO,MAAA,CAAO,eAAe,OAAO,CAAA;AACtC;AAEA,SAAS,uBACP,MAAA,EACiC;AACjC,EAAA,OAAO,MAAA,CAAO,eAAe,KAAK,CAAA;AACpC;AAuBA,SAAS,eAAA,CACP,MAAA,EACA,WAAA,EACA,eAAA,EACA,MACA,MAAA,EACmB;AACnB,EAAA,IAAI,sBAAA,CAAuB,MAAM,CAAA,EAAG;AAClC,IAAA,OAAO,eAAA;AAAA,MACL,MAAA,CAAO,GAAA;AAAA,MACP,WAAA;AAAA,MACA,eAAA;AAAA,MACA,IAAA;AAAA,MACA,CAAC;AAAA,KACH;AAAA,EACF;AAEA,EAAA,IAAI,sBAAA,CAAuB,MAAM,CAAA,EAAG;AAClC,IAAA,MAAM,GAAA,GAAM,MAAA,CAAO,GAAA,CAAI,WAAA,EAAY;AACnC,IAAA,MAAM,SAAS,MAAA,CAAO,MAAA,EAAQ,IAAI,CAAA,CAAA,KAAK,CAAA,CAAE,aAAa,CAAA;AACtD,IAAA,MAAM,UAAA,GAAa,IAAA,CAAkB,QAAQ,CAAA,CAC1C,OAAO,kBAAkB,CAAA,CACzB,KAAA,CAAM,EAAE,GAAA,EAAK,CAAA,CACb,QAAA,CAAS,SAAS,SAAA,GAAY;AAC7B,MAAA,IAAI,MAAA,EAAQ,WAAW,CAAA,EAAG;AACxB,QAAA,IAAA,CAAK,MAAM,EAAE,KAAA,EAAO,OAAO,EAAA,CAAG,CAAC,GAAG,CAAA;AAAA,MACpC,WAAW,MAAA,EAAQ;AACjB,QAAA,IAAA,CAAK,QAAA,CAAS,OAAA,EAAS,IAAA,EAAM,MAAM,CAAA;AAAA,MACrC;AAAA,IACF,CAAC,CAAA;AACH,IAAA,OAAO,WAAA,CAAY,QAAA;AAAA,MACjB,eAAA;AAAA,MACA,SAAS,QAAA,GAAW,IAAA;AAAA,MACpB;AAAA,KACF;AAAA,EACF;AAEA,EAAA,OAAO,WAAA,CAAY,MAAA,GAAS,aAAA,GAAgB,UAAU,CAAA;AAAA,IA
CpD,SAAS,cAAA,GAAiB;AACxB,MAAA,IAAI,gBAAA,CAAiB,MAAM,CAAA,EAAG;AAC5B,QAAA,KAAA,MAAW,SAAA,IAAa,MAAA,CAAO,KAAA,IAAS,EAAC,EAAG;AAC1C,UAAA,IAAA,CAAK,OAAA;AAAA,YAAQ,cACX,eAAA,CAAgB,SAAA,EAAW,QAAA,EAAU,eAAA,EAAiB,MAAM,KAAK;AAAA,WACnE;AAAA,QACF;AAAA,MACF,CAAA,MAAO;AACL,QAAA,KAAA,MAAW,SAAA,IAAa,MAAA,CAAO,KAAA,IAAS,EAAC,EAAG;AAC1C,UAAA,IAAA,CAAK,QAAA;AAAA,YAAS,cACZ,eAAA,CAAgB,SAAA,EAAW,QAAA,EAAU,eAAA,EAAiB,MAAM,KAAK;AAAA,WACnE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,GACF;AACF;AAGO,SAAS,yBAAyB,OAAA,EAOnB;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,KAAA,EAAO,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAE9D,EAAA,IAAI,MAAA,GAAS,WAAA;AAEb,EAAA,IAAI,MAAA,EAAQ;AACV,IAAA,MAAA,GAAS,eAAA,CAAgB,MAAA,EAAQ,MAAA,EAAQ,eAAA,EAAiB,MAAM,KAAK,CAAA;AAAA,EACvE;AAEA,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,MAAA,GAASA,mEAAA,CAAkC;AAAA,MACzC,MAAA,EAAQ,KAAA;AAAA,MACR,WAAA,EAAa,MAAA;AAAA,MACb,eAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAEA,EAAA,OAAO,MAAA;AACT;;;;"}
1
+ {"version":3,"file":"applyEntityFilterToQuery.cjs.js","sources":["../../../src/service/request/applyEntityFilterToQuery.ts"],"sourcesContent":["/*\n * Copyright 2024 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n EntitiesSearchFilter,\n EntityFilter,\n} from '@backstage/plugin-catalog-node';\nimport { FilterPredicate } from '@backstage/filter-predicates';\nimport { Knex } from 'knex';\nimport { applyPredicateEntityFilterToQuery } from './applyPredicateEntityFilterToQuery';\nimport { searchExists, SEARCH_FLT_ALIAS } from './searchSubquery';\n\nfunction isEntitiesSearchFilter(\n filter: EntitiesSearchFilter | EntityFilter,\n): filter is EntitiesSearchFilter {\n return filter.hasOwnProperty('key');\n}\n\nfunction isOrEntityFilter(\n filter: EntityFilter,\n): filter is { anyOf: EntityFilter[] } {\n return filter.hasOwnProperty('anyOf');\n}\n\nfunction isNegationEntityFilter(\n filter: EntityFilter,\n): filter is { not: EntityFilter } {\n return filter.hasOwnProperty('not');\n}\n\n/**\n * Applies filtering through correlated EXISTS subqueries. 
Example:\n *\n * ```\n * SELECT * FROM final_entities\n * WHERE\n * EXISTS (\n * SELECT 1 FROM search AS search_flt\n * WHERE search_flt.entity_id = final_entities.entity_id\n * AND key = 'kind' AND value = 'component'\n * )\n * AND EXISTS (\n * SELECT 1 FROM search AS search_flt\n * WHERE search_flt.entity_id = final_entities.entity_id\n * AND key = 'spec.lifecycle' AND value = 'production'\n * )\n * AND final_entities.final_entity IS NOT NULL\n * ```\n *\n * The EXISTS strategy enables efficient semi-join plans, particularly on\n * PostgreSQL with large datasets, since the database can stop scanning as\n * soon as the first matching row is found.\n */\nfunction applyExistsStrategy(\n filter: EntityFilter,\n targetQuery: Knex.QueryBuilder,\n onEntityIdField: string,\n knex: Knex,\n negate: boolean,\n): Knex.QueryBuilder {\n if (isNegationEntityFilter(filter)) {\n return applyExistsStrategy(\n filter.not,\n targetQuery,\n onEntityIdField,\n knex,\n !negate,\n );\n }\n\n if (isEntitiesSearchFilter(filter)) {\n const key = filter.key.toLowerCase();\n const values = filter.values?.map(v => v.toLowerCase());\n const subquery = searchExists(knex, onEntityIdField)\n .where(`${SEARCH_FLT_ALIAS}.key`, key)\n .andWhere(function keyFilter() {\n if (values?.length === 1) {\n this.where(`${SEARCH_FLT_ALIAS}.value`, values.at(0));\n } else if (values) {\n this.whereIn(`${SEARCH_FLT_ALIAS}.value`, values);\n }\n });\n return negate\n ? targetQuery.whereNotExists(subquery)\n : targetQuery.whereExists(subquery);\n }\n\n return targetQuery[negate ? 'andWhereNot' : 'andWhere'](\n function filterFunction() {\n if (isOrEntityFilter(filter)) {\n for (const subFilter of filter.anyOf ?? []) {\n this.orWhere(subQuery =>\n applyExistsStrategy(\n subFilter,\n subQuery,\n onEntityIdField,\n knex,\n false,\n ),\n );\n }\n } else {\n for (const subFilter of filter.allOf ?? 
[]) {\n this.andWhere(subQuery =>\n applyExistsStrategy(\n subFilter,\n subQuery,\n onEntityIdField,\n knex,\n false,\n ),\n );\n }\n }\n },\n );\n}\n\n// The actual exported function\nexport function applyEntityFilterToQuery(options: {\n filter?: EntityFilter;\n query?: FilterPredicate;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { filter, query, targetQuery, onEntityIdField, knex } = options;\n\n let result = targetQuery;\n\n if (filter) {\n result = applyExistsStrategy(filter, result, onEntityIdField, knex, false);\n }\n\n if (query) {\n result = applyPredicateEntityFilterToQuery({\n filter: query,\n targetQuery: result,\n onEntityIdField,\n knex,\n });\n }\n\n return result;\n}\n"],"names":["searchExists","SEARCH_FLT_ALIAS","applyPredicateEntityFilterToQuery"],"mappings":";;;;;AAyBA,SAAS,uBACP,MAAA,EACgC;AAChC,EAAA,OAAO,MAAA,CAAO,eAAe,KAAK,CAAA;AACpC;AAEA,SAAS,iBACP,MAAA,EACqC;AACrC,EAAA,OAAO,MAAA,CAAO,eAAe,OAAO,CAAA;AACtC;AAEA,SAAS,uBACP,MAAA,EACiC;AACjC,EAAA,OAAO,MAAA,CAAO,eAAe,KAAK,CAAA;AACpC;AAyBA,SAAS,mBAAA,CACP,MAAA,EACA,WAAA,EACA,eAAA,EACA,MACA,MAAA,EACmB;AACnB,EAAA,IAAI,sBAAA,CAAuB,MAAM,CAAA,EAAG;AAClC,IAAA,OAAO,mBAAA;AAAA,MACL,MAAA,CAAO,GAAA;AAAA,MACP,WAAA;AAAA,MACA,eAAA;AAAA,MACA,IAAA;AAAA,MACA,CAAC;AAAA,KACH;AAAA,EACF;AAEA,EAAA,IAAI,sBAAA,CAAuB,MAAM,CAAA,EAAG;AAClC,IAAA,MAAM,GAAA,GAAM,MAAA,CAAO,GAAA,CAAI,WAAA,EAAY;AACnC,IAAA,MAAM,SAAS,MAAA,CAAO,MAAA,EAAQ,IAAI,CAAA,CAAA,KAAK,CAAA,CAAE,aAAa,CAAA;AACtD,IAAA,MAAM,QAAA,GAAWA,2BAAA,CAAa,IAAA,EAAM,eAAe,CAAA,CAChD,KAAA,CAAM,CAAA,EAAGC,+BAAgB,CAAA,IAAA,CAAA,EAAQ,GAAG,CAAA,CACpC,QAAA,CAAS,SAAS,SAAA,GAAY;AAC7B,MAAA,IAAI,MAAA,EAAQ,WAAW,CAAA,EAAG;AACxB,QAAA,IAAA,CAAK,MAAM,CAAA,EAAGA,+BAAgB,UAAU,MAAA,CAAO,EAAA,CAAG,CAAC,CAAC,CAAA;AAAA,MACtD,WAAW,MAAA,EAAQ;AACjB,QAAA,IAAA,CAAK,OAAA,CAAQ,CAAA,EAAGA,+BAAgB,CAAA,MAAA,CAAA,EAAU,MAAM,CAAA;AAAA,MAClD;AAAA,IACF,CAAC,CAAA;AACH,IAAA,OAAO,SACH,WAAA,CAAY,cAAA,CAAe,QAAQ,CAAA,GACnC,WAAA,CAAY,YAAY,QAAQ,CAAA;AAAA,EACt
C;AAEA,EAAA,OAAO,WAAA,CAAY,MAAA,GAAS,aAAA,GAAgB,UAAU,CAAA;AAAA,IACpD,SAAS,cAAA,GAAiB;AACxB,MAAA,IAAI,gBAAA,CAAiB,MAAM,CAAA,EAAG;AAC5B,QAAA,KAAA,MAAW,SAAA,IAAa,MAAA,CAAO,KAAA,IAAS,EAAC,EAAG;AAC1C,UAAA,IAAA,CAAK,OAAA;AAAA,YAAQ,CAAA,QAAA,KACX,mBAAA;AAAA,cACE,SAAA;AAAA,cACA,QAAA;AAAA,cACA,eAAA;AAAA,cACA,IAAA;AAAA,cACA;AAAA;AACF,WACF;AAAA,QACF;AAAA,MACF,CAAA,MAAO;AACL,QAAA,KAAA,MAAW,SAAA,IAAa,MAAA,CAAO,KAAA,IAAS,EAAC,EAAG;AAC1C,UAAA,IAAA,CAAK,QAAA;AAAA,YAAS,CAAA,QAAA,KACZ,mBAAA;AAAA,cACE,SAAA;AAAA,cACA,QAAA;AAAA,cACA,eAAA;AAAA,cACA,IAAA;AAAA,cACA;AAAA;AACF,WACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,GACF;AACF;AAGO,SAAS,yBAAyB,OAAA,EAMnB;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,KAAA,EAAO,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAE9D,EAAA,IAAI,MAAA,GAAS,WAAA;AAEb,EAAA,IAAI,MAAA,EAAQ;AACV,IAAA,MAAA,GAAS,mBAAA,CAAoB,MAAA,EAAQ,MAAA,EAAQ,eAAA,EAAiB,MAAM,KAAK,CAAA;AAAA,EAC3E;AAEA,EAAA,IAAI,KAAA,EAAO;AACT,IAAA,MAAA,GAASC,mEAAA,CAAkC;AAAA,MACzC,MAAA,EAAQ,KAAA;AAAA,MACR,WAAA,EAAa,MAAA;AAAA,MACb,eAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAEA,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -1,6 +1,7 @@
1
1
  'use strict';
2
2
 
3
3
  var errors = require('@backstage/errors');
4
+ var searchSubquery = require('./searchSubquery.cjs.js');
4
5
 
5
6
  function isPrimitive(value) {
6
7
  return typeof value === "string" || typeof value === "number" || typeof value === "boolean";
@@ -78,40 +79,41 @@ function applyPredicateEntityFilterToQuery(options) {
78
79
  function applyFieldCondition(options) {
79
80
  const { key, value, targetQuery, onEntityIdField, knex } = options;
80
81
  if (isPrimitive(value)) {
81
- const matchQuery = knex("search").select("search.entity_id").where({
82
- key,
83
- value: String(value).toLocaleLowerCase("en-US")
84
- });
85
- return targetQuery.andWhere(onEntityIdField, "in", matchQuery);
82
+ return targetQuery.whereExists(
83
+ searchSubquery.searchExists(knex, onEntityIdField).where(`${searchSubquery.SEARCH_FLT_ALIAS}.key`, key).where(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, String(value).toLocaleLowerCase("en-US"))
84
+ );
86
85
  }
87
86
  if (isObject(value)) {
88
87
  if ("$exists" in value) {
89
- const existsQuery = knex("search").select("search.entity_id").where({ key });
90
- return targetQuery.andWhere(
91
- onEntityIdField,
92
- value.$exists ? "in" : "not in",
93
- existsQuery
88
+ const subquery = searchSubquery.searchExists(knex, onEntityIdField).where(
89
+ `${searchSubquery.SEARCH_FLT_ALIAS}.key`,
90
+ key
94
91
  );
92
+ return value.$exists ? targetQuery.whereExists(subquery) : targetQuery.whereNotExists(subquery);
95
93
  }
96
94
  if ("$in" in value) {
97
95
  const values = value.$in.map((v) => String(v).toLocaleLowerCase("en-US"));
98
- const matchQuery = knex("search").select("search.entity_id").where({ key }).whereIn("value", values);
99
- return targetQuery.andWhere(onEntityIdField, "in", matchQuery);
96
+ return targetQuery.whereExists(
97
+ searchSubquery.searchExists(knex, onEntityIdField).where(`${searchSubquery.SEARCH_FLT_ALIAS}.key`, key).whereIn(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, values)
98
+ );
100
99
  }
101
100
  if ("$hasPrefix" in value) {
102
101
  const prefix = value.$hasPrefix.toLocaleLowerCase("en-US");
103
102
  const escaped = prefix.replace(/[%_\\]/g, (c) => `\\${c}`);
104
- const matchQuery = knex("search").select("search.entity_id").where({ key }).andWhereRaw("?? like ? escape ?", ["value", `${escaped}%`, "\\"]);
105
- return targetQuery.andWhere(onEntityIdField, "in", matchQuery);
103
+ return targetQuery.whereExists(
104
+ searchSubquery.searchExists(knex, onEntityIdField).where(`${searchSubquery.SEARCH_FLT_ALIAS}.key`, key).andWhereRaw("?? like ? escape ?", [
105
+ `${searchSubquery.SEARCH_FLT_ALIAS}.value`,
106
+ `${escaped}%`,
107
+ "\\"
108
+ ])
109
+ );
106
110
  }
107
111
  if ("$contains" in value) {
108
112
  const target = value.$contains;
109
113
  if (isPrimitive(target)) {
110
- const matchQuery = knex("search").select("search.entity_id").where({
111
- key,
112
- value: String(target).toLocaleLowerCase("en-US")
113
- });
114
- return targetQuery.andWhere(onEntityIdField, "in", matchQuery);
114
+ return targetQuery.whereExists(
115
+ searchSubquery.searchExists(knex, onEntityIdField).where(`${searchSubquery.SEARCH_FLT_ALIAS}.key`, key).where(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, String(target).toLocaleLowerCase("en-US"))
116
+ );
115
117
  }
116
118
  if (isObject(target)) {
117
119
  if (key === "relations") {
@@ -190,11 +192,14 @@ function applyContainsRelation(options) {
190
192
  `The $contains operator for "relations" requires a "type" string property`
191
193
  );
192
194
  }
193
- const matchQuery = knex("search").select("search.entity_id").where({ key: `relations.${type.toLocaleLowerCase("en-US")}` });
195
+ const subquery = searchSubquery.searchExists(knex, onEntityIdField).where(
196
+ `${searchSubquery.SEARCH_FLT_ALIAS}.key`,
197
+ `relations.${type.toLocaleLowerCase("en-US")}`
198
+ );
194
199
  if (targetRef) {
195
- matchQuery.whereIn("value", targetRef);
200
+ subquery.whereIn(`${searchSubquery.SEARCH_FLT_ALIAS}.value`, targetRef);
196
201
  }
197
- return targetQuery.andWhere(onEntityIdField, "in", matchQuery);
202
+ return targetQuery.whereExists(subquery);
198
203
  }
199
204
 
200
205
  exports.applyPredicateEntityFilterToQuery = applyPredicateEntityFilterToQuery;
@@ -1 +1 @@
1
- {"version":3,"file":"applyPredicateEntityFilterToQuery.cjs.js","sources":["../../../src/service/request/applyPredicateEntityFilterToQuery.ts"],"sourcesContent":["/*\n * Copyright 2026 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n FilterPredicate,\n FilterPredicatePrimitive,\n FilterPredicateValue,\n} from '@backstage/filter-predicates';\nimport { InputError } from '@backstage/errors';\nimport { Knex } from 'knex';\nimport { DbSearchRow } from '../../database/tables';\n\nfunction isPrimitive(value: unknown): value is FilterPredicatePrimitive {\n return (\n typeof value === 'string' ||\n typeof value === 'number' ||\n typeof value === 'boolean'\n );\n}\n\nfunction isObject(value: unknown): value is Record<string, unknown> {\n return typeof value === 'object' && value !== null && !Array.isArray(value);\n}\n\nexport function applyPredicateEntityFilterToQuery(options: {\n filter: FilterPredicate;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { filter, targetQuery, onEntityIdField, knex } = options;\n\n // We do not support top-level primitives; all matching happens through objects\n if (!isObject(filter)) {\n const actual = JSON.stringify(filter);\n throw new InputError(\n `Invalid filter predicate: top-level primitive values are not supported. Wrap the value in a field expression, e.g. 
{ \"kind\": ${actual} }`,\n );\n }\n\n if ('$not' in filter) {\n return targetQuery.andWhereNot(inner =>\n applyPredicateEntityFilterToQuery({\n filter: filter.$not,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n\n if ('$all' in filter) {\n if (filter.$all.length === 0) {\n return targetQuery.andWhereRaw('1 = 1');\n }\n return targetQuery.andWhere(outer => {\n for (const subFilter of filter.$all) {\n outer.andWhere(inner =>\n applyPredicateEntityFilterToQuery({\n filter: subFilter,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n });\n }\n\n if ('$any' in filter) {\n if (filter.$any.length === 0) {\n return targetQuery.andWhereRaw('1 = 0');\n }\n return targetQuery.andWhere(outer => {\n for (const subFilter of filter.$any) {\n outer.orWhere(inner =>\n applyPredicateEntityFilterToQuery({\n filter: subFilter,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n });\n }\n\n // Treat the filter as a field expression like { \"kind\": \"component\" } or { \"spec.type\": { \"$in\": [\"service\", \"website\"] } }\n if (Object.keys(filter).length === 0) {\n return targetQuery.andWhereRaw('1 = 1');\n }\n return targetQuery.andWhere(inner => {\n for (const [keyAnyCase, value] of Object.entries(filter)) {\n applyFieldCondition({\n key: keyAnyCase.toLocaleLowerCase('en-US'),\n value,\n targetQuery: inner,\n onEntityIdField,\n knex,\n });\n }\n });\n}\n\n/**\n * Applies a single { key: value } filter to the target query.\n */\nfunction applyFieldCondition(options: {\n key: string;\n value: FilterPredicateValue;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { key, value, targetQuery, onEntityIdField, knex } = options;\n\n if (isPrimitive(value)) {\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({\n key,\n value: String(value).toLocaleLowerCase('en-US'),\n });\n return targetQuery.andWhere(onEntityIdField, 'in', matchQuery);\n }\n\n 
if (isObject(value)) {\n if ('$exists' in value) {\n const existsQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({ key });\n return targetQuery.andWhere(\n onEntityIdField,\n value.$exists ? 'in' : 'not in',\n existsQuery,\n );\n }\n\n if ('$in' in value) {\n const values = value.$in.map(v => String(v).toLocaleLowerCase('en-US'));\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({ key })\n .whereIn('value', values);\n return targetQuery.andWhere(onEntityIdField, 'in', matchQuery);\n }\n\n if ('$hasPrefix' in value) {\n const prefix = value.$hasPrefix.toLocaleLowerCase('en-US');\n const escaped = prefix.replace(/[%_\\\\]/g, c => `\\\\${c}`);\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({ key })\n .andWhereRaw('?? like ? escape ?', ['value', `${escaped}%`, '\\\\']);\n return targetQuery.andWhere(onEntityIdField, 'in', matchQuery);\n }\n\n if ('$contains' in value) {\n const target = value.$contains;\n\n // If the target is a primitive, match on the special array syntax.\n //\n // FROM: `{ \"a\": { \"$contains\": \"b\" } }`\n //\n // TO: `{ \"a\": \"b\" }`\n //\n // The search table does not actually show us that \"a\" was an array to\n // begin with, so this can mistakenly also match on an object that had a\n // \"b\" key with a primitive value. 
We'll consider that an acceptable\n // tradeoff though.\n if (isPrimitive(target)) {\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({\n key,\n value: String(target).toLocaleLowerCase('en-US'),\n });\n return targetQuery.andWhere(onEntityIdField, 'in', matchQuery);\n }\n\n // Object form of $contains - currently only supports relation-style\n // objects with \"type\" and optional \"targetRef\" keys.\n //\n // FROM: `{ \"relations\": { \"$contains\": { \"type\": \"ownedBy\", \"targetRef\": \"group:default/team-a\" } } }`\n //\n // TO: search for key = \"relations.ownedby\" AND value = \"group:default/team-a\"\n if (isObject(target)) {\n if (key === 'relations') {\n return applyContainsRelation({\n target,\n targetQuery,\n onEntityIdField,\n knex,\n });\n }\n\n throw new InputError(\n `Object form of $contains is not supported for field \"${key}\"`,\n );\n }\n\n const actual = JSON.stringify(target);\n throw new InputError(\n `Unsupported $contains target for field \"${key}\": ${actual}`,\n );\n }\n }\n\n const actual = JSON.stringify(value);\n throw new InputError(\n `Invalid filter predicate value for field \"${key}\": expected a primitive value, $exists, $in, $hasPrefix, or $contains operator, but got ${actual}`,\n );\n}\n\n/**\n * Handles expressions on the form\n *\n * ```\n * {\n * \"relations\": {\n * \"$contains\": {\n * \"type\": \"ownedBy\",\n * \"targetRef\": \"group:default/team-a\"\n * }\n * }\n * }\n * ```\n *\n * which map onto the search table's special `relation.<type>: <targetRef>`\n * syntax.\n *\n * Only the keys \"type\" and \"targetRef\" are supported. The \"type\" key is\n * required. If \"targetRef\" is omitted, it becomes an existence check for any\n * relation of that type. 
The \"targetRef\" value can be a string or an `$in`\n * array.\n */\nfunction applyContainsRelation(options: {\n target: Record<string, unknown>;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { target: rawTarget, targetQuery, onEntityIdField, knex } = options;\n\n function parseStringOrIn(value: unknown): string[] {\n if (typeof value === 'string') {\n return [value.toLocaleLowerCase('en-US')];\n }\n if (\n isObject(value) &&\n Object.keys(value).length === 1 &&\n '$in' in value &&\n Array.isArray(value.$in) &&\n value.$in.every((v): v is string => typeof v === 'string')\n ) {\n if (value.$in.length === 0) {\n throw new InputError(\n `Empty \"$in\" array for $contains on \"relations\" is not allowed`,\n );\n }\n return value.$in.map(v => v.toLocaleLowerCase('en-US'));\n }\n const actual = JSON.stringify(value);\n throw new InputError(\n `Unsupported value in $contains for \"relations\": expected a string or { \"$in\": [strings] }, but got ${actual}`,\n );\n }\n\n let type: string | undefined;\n let targetRef: string[] | undefined;\n\n for (const [rawKey, value] of Object.entries(rawTarget)) {\n const key = rawKey.toLocaleLowerCase('en-US');\n\n if (key === 'type') {\n if (type !== undefined) {\n throw new InputError(\n `Duplicate key \"${rawKey}\" in $contains for \"relations\"`,\n );\n }\n if (typeof value !== 'string') {\n throw new InputError(\n `The $contains operator for \"relations\" requires a \"type\" string property`,\n );\n }\n type = value;\n } else if (key === 'targetref') {\n if (targetRef !== undefined) {\n throw new InputError(\n `Duplicate key \"${rawKey}\" in $contains for \"relations\"`,\n );\n }\n targetRef = parseStringOrIn(value);\n } else {\n throw new InputError(\n `Unsupported key \"${rawKey}\" in $contains for \"relations\". 
Only \"type\" and \"targetRef\" are supported`,\n );\n }\n }\n\n if (!type) {\n throw new InputError(\n `The $contains operator for \"relations\" requires a \"type\" string property`,\n );\n }\n\n const matchQuery = knex<DbSearchRow>('search')\n .select('search.entity_id')\n .where({ key: `relations.${type.toLocaleLowerCase('en-US')}` });\n\n if (targetRef) {\n matchQuery.whereIn('value', targetRef);\n }\n\n return targetQuery.andWhere(onEntityIdField, 'in', matchQuery);\n}\n"],"names":["InputError","actual"],"mappings":";;;;AAyBA,SAAS,YAAY,KAAA,EAAmD;AACtE,EAAA,OACE,OAAO,KAAA,KAAU,QAAA,IACjB,OAAO,KAAA,KAAU,QAAA,IACjB,OAAO,KAAA,KAAU,SAAA;AAErB;AAEA,SAAS,SAAS,KAAA,EAAkD;AAClE,EAAA,OAAO,OAAO,UAAU,QAAA,IAAY,KAAA,KAAU,QAAQ,CAAC,KAAA,CAAM,QAAQ,KAAK,CAAA;AAC5E;AAEO,SAAS,kCAAkC,OAAA,EAK5B;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAGvD,EAAA,IAAI,CAAC,QAAA,CAAS,MAAM,CAAA,EAAG;AACrB,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AACpC,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,gIAAgI,MAAM,CAAA,EAAA;AAAA,KACxI;AAAA,EACF;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,OAAO,WAAA,CAAY,WAAA;AAAA,MAAY,WAC7B,iCAAA,CAAkC;AAAA,QAChC,QAAQ,MAAA,CAAO,IAAA;AAAA,QACf,WAAA,EAAa,KAAA;AAAA,QACb,eAAA;AAAA,QACA;AAAA,OACD;AAAA,KACH;AAAA,EACF;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,KAAW,CAAA,EAAG;AAC5B,MAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,IACxC;AACA,IAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,MAAA,KAAA,MAAW,SAAA,IAAa,OAAO,IAAA,EAAM;AACnC,QAAA,KAAA,CAAM,QAAA;AAAA,UAAS,WACb,iCAAA,CAAkC;AAAA,YAChC,MAAA,EAAQ,SAAA;AAAA,YACR,WAAA,EAAa,KAAA;AAAA,YACb,eAAA;AAAA,YACA;AAAA,WACD;AAAA,SACH;AAAA,MACF;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,KAAW,CAAA,EAAG;AAC5B,MAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,IACxC;AACA,IAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,MAAA,KAAA,MAAW,SAAA,IAAa,OAAO,IAAA,EAAM;AACnC,QAAA,KAAA,CAAM,OAAA;AAAA,UAAQ,WACZ,iCAAA,CAAkC;AAAA,YAChC,MAAA,EAAQ,SAAA;AAAA,YACR,WAAA,EAAa,KAAA;AAAA,Y
ACb,eAAA;AAAA,YACA;AAAA,WACD;AAAA,SACH;AAAA,MACF;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAGA,EAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAM,CAAA,CAAE,WAAW,CAAA,EAAG;AACpC,IAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,EACxC;AACA,EAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,IAAA,KAAA,MAAW,CAAC,UAAA,EAAY,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,EAAG;AACxD,MAAA,mBAAA,CAAoB;AAAA,QAClB,GAAA,EAAK,UAAA,CAAW,iBAAA,CAAkB,OAAO,CAAA;AAAA,QACzC,KAAA;AAAA,QACA,WAAA,EAAa,KAAA;AAAA,QACb,eAAA;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH;AAAA,EACF,CAAC,CAAA;AACH;AAKA,SAAS,oBAAoB,OAAA,EAMP;AACpB,EAAA,MAAM,EAAE,GAAA,EAAK,KAAA,EAAO,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAE3D,EAAA,IAAI,WAAA,CAAY,KAAK,CAAA,EAAG;AACtB,IAAA,MAAM,aAAa,IAAA,CAAkB,QAAQ,EAC1C,MAAA,CAAO,kBAAkB,EACzB,KAAA,CAAM;AAAA,MACL,GAAA;AAAA,MACA,KAAA,EAAO,MAAA,CAAO,KAAK,CAAA,CAAE,kBAAkB,OAAO;AAAA,KAC/C,CAAA;AACH,IAAA,OAAO,WAAA,CAAY,QAAA,CAAS,eAAA,EAAiB,IAAA,EAAM,UAAU,CAAA;AAAA,EAC/D;AAEA,EAAA,IAAI,QAAA,CAAS,KAAK,CAAA,EAAG;AACnB,IAAA,IAAI,aAAa,KAAA,EAAO;AACtB,MAAA,MAAM,WAAA,GAAc,IAAA,CAAkB,QAAQ,CAAA,CAC3C,MAAA,CAAO,kBAAkB,CAAA,CACzB,KAAA,CAAM,EAAE,GAAA,EAAK,CAAA;AAChB,MAAA,OAAO,WAAA,CAAY,QAAA;AAAA,QACjB,eAAA;AAAA,QACA,KAAA,CAAM,UAAU,IAAA,GAAO,QAAA;AAAA,QACvB;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAI,SAAS,KAAA,EAAO;AAClB,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAA,CAAI,CAAA,CAAA,KAAK,OAAO,CAAC,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC,CAAA;AACtE,MAAA,MAAM,UAAA,GAAa,IAAA,CAAkB,QAAQ,CAAA,CAC1C,OAAO,kBAAkB,CAAA,CACzB,KAAA,CAAM,EAAE,GAAA,EAAK,CAAA,CACb,OAAA,CAAQ,SAAS,MAAM,CAAA;AAC1B,MAAA,OAAO,WAAA,CAAY,QAAA,CAAS,eAAA,EAAiB,IAAA,EAAM,UAAU,CAAA;AAAA,IAC/D;AAEA,IAAA,IAAI,gBAAgB,KAAA,EAAO;AACzB,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,UAAA,CAAW,iBAAA,CAAkB,OAAO,CAAA;AACzD,MAAA,MAAM,UAAU,MAAA,CAAO,OAAA,CAAQ,WAAW,CAAA,CAAA,KAAK,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AACvD,MAAA,MAAM,UAAA,GAAa,KAAkB,QAAQ,CAAA,CAC1C,OAAO,kBAAkB,CAAA,CACzB,MAAM,EAAE,GAAA,EAAK,CAAA,CACb,WAAA,CAAY,sBAAsB,CAAC,OAAA,EAAS,GAAG,OAAO,CAAA,CAAA,CAAA,EAAK,IAAI,CAAC,CAAA;AACnE,MAAA,OAAO,WAAA,CAAY,QAAA,CAAS,eAAA,EAAiB,IAAA,EAAM,UAAU,CA
AA;AAAA,IAC/D;AAEA,IAAA,IAAI,eAAe,KAAA,EAAO;AACxB,MAAA,MAAM,SAAS,KAAA,CAAM,SAAA;AAYrB,MAAA,IAAI,WAAA,CAAY,MAAM,CAAA,EAAG;AACvB,QAAA,MAAM,aAAa,IAAA,CAAkB,QAAQ,EAC1C,MAAA,CAAO,kBAAkB,EACzB,KAAA,CAAM;AAAA,UACL,GAAA;AAAA,UACA,KAAA,EAAO,MAAA,CAAO,MAAM,CAAA,CAAE,kBAAkB,OAAO;AAAA,SAChD,CAAA;AACH,QAAA,OAAO,WAAA,CAAY,QAAA,CAAS,eAAA,EAAiB,IAAA,EAAM,UAAU,CAAA;AAAA,MAC/D;AAQA,MAAA,IAAI,QAAA,CAAS,MAAM,CAAA,EAAG;AACpB,QAAA,IAAI,QAAQ,WAAA,EAAa;AACvB,UAAA,OAAO,qBAAA,CAAsB;AAAA,YAC3B,MAAA;AAAA,YACA,WAAA;AAAA,YACA,eAAA;AAAA,YACA;AAAA,WACD,CAAA;AAAA,QACH;AAEA,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,wDAAwD,GAAG,CAAA,CAAA;AAAA,SAC7D;AAAA,MACF;AAEA,MAAA,MAAMC,OAAAA,GAAS,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AACpC,MAAA,MAAM,IAAID,iBAAA;AAAA,QACR,CAAA,wCAAA,EAA2C,GAAG,CAAA,GAAA,EAAMC,OAAM,CAAA;AAAA,OAC5D;AAAA,IACF;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,KAAK,CAAA;AACnC,EAAA,MAAM,IAAID,iBAAA;AAAA,IACR,CAAA,0CAAA,EAA6C,GAAG,CAAA,wFAAA,EAA2F,MAAM,CAAA;AAAA,GACnJ;AACF;AAwBA,SAAS,sBAAsB,OAAA,EAKT;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,SAAA,EAAW,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAElE,EAAA,SAAS,gBAAgB,KAAA,EAA0B;AACjD,IAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,MAAA,OAAO,CAAC,KAAA,CAAM,iBAAA,CAAkB,OAAO,CAAC,CAAA;AAAA,IAC1C;AACA,IAAA,IACE,QAAA,CAAS,KAAK,CAAA,IACd,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA,CAAE,MAAA,KAAW,CAAA,IAC9B,KAAA,IAAS,KAAA,IACT,KAAA,CAAM,QAAQ,KAAA,CAAM,GAAG,CAAA,IACvB,KAAA,CAAM,GAAA,CAAI,KAAA,CAAM,CAAC,CAAA,KAAmB,OAAO,CAAA,KAAM,QAAQ,CAAA,EACzD;AACA,MAAA,IAAI,KAAA,CAAM,GAAA,CAAI,MAAA,KAAW,CAAA,EAAG;AAC1B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,CAAA,6DAAA;AAAA,SACF;AAAA,MACF;AACA,MAAA,OAAO,MAAM,GAAA,CAAI,GAAA,CAAI,OAAK,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC,CAAA;AAAA,IACxD;AACA,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,KAAK,CAAA;AACnC,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,sGAAsG,MAAM,CAAA;AAAA,KAC9G;AAAA,EACF;AAEA,EAAA,IAAI,IAAA;AACJ,EAAA,IAAI,SAAA;AAEJ,EAAA,KAAA,MAAW,CAAC,MAAA,EAAQ,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,SAAS,CAAA,EAAG;AACvD,IAAA,MAAM,GAAA,GAAM,MAAA,CAAO,iBAAA,CAAkB,OAAO,CAAA;AAE5C,IAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,MAAA,
IAAI,SAAS,MAAA,EAAW;AACtB,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,kBAAkB,MAAM,CAAA,8BAAA;AAAA,SAC1B;AAAA,MACF;AACA,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,CAAA,wEAAA;AAAA,SACF;AAAA,MACF;AACA,MAAA,IAAA,GAAO,KAAA;AAAA,IACT,CAAA,MAAA,IAAW,QAAQ,WAAA,EAAa;AAC9B,MAAA,IAAI,cAAc,MAAA,EAAW;AAC3B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,kBAAkB,MAAM,CAAA,8BAAA;AAAA,SAC1B;AAAA,MACF;AACA,MAAA,SAAA,GAAY,gBAAgB,KAAK,CAAA;AAAA,IACnC,CAAA,MAAO;AACL,MAAA,MAAM,IAAIA,iBAAA;AAAA,QACR,oBAAoB,MAAM,CAAA,yEAAA;AAAA,OAC5B;AAAA,IACF;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,IAAA,EAAM;AACT,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,CAAA,wEAAA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,MAAM,aAAa,IAAA,CAAkB,QAAQ,CAAA,CAC1C,MAAA,CAAO,kBAAkB,CAAA,CACzB,KAAA,CAAM,EAAE,GAAA,EAAK,aAAa,IAAA,CAAK,iBAAA,CAAkB,OAAO,CAAC,IAAI,CAAA;AAEhE,EAAA,IAAI,SAAA,EAAW;AACb,IAAA,UAAA,CAAW,OAAA,CAAQ,SAAS,SAAS,CAAA;AAAA,EACvC;AAEA,EAAA,OAAO,WAAA,CAAY,QAAA,CAAS,eAAA,EAAiB,IAAA,EAAM,UAAU,CAAA;AAC/D;;;;"}
1
+ {"version":3,"file":"applyPredicateEntityFilterToQuery.cjs.js","sources":["../../../src/service/request/applyPredicateEntityFilterToQuery.ts"],"sourcesContent":["/*\n * Copyright 2026 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n FilterPredicate,\n FilterPredicatePrimitive,\n FilterPredicateValue,\n} from '@backstage/filter-predicates';\nimport { InputError } from '@backstage/errors';\nimport { Knex } from 'knex';\nimport { searchExists, SEARCH_FLT_ALIAS as S } from './searchSubquery';\n\nfunction isPrimitive(value: unknown): value is FilterPredicatePrimitive {\n return (\n typeof value === 'string' ||\n typeof value === 'number' ||\n typeof value === 'boolean'\n );\n}\n\nfunction isObject(value: unknown): value is Record<string, unknown> {\n return typeof value === 'object' && value !== null && !Array.isArray(value);\n}\n\nexport function applyPredicateEntityFilterToQuery(options: {\n filter: FilterPredicate;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { filter, targetQuery, onEntityIdField, knex } = options;\n\n // We do not support top-level primitives; all matching happens through objects\n if (!isObject(filter)) {\n const actual = JSON.stringify(filter);\n throw new InputError(\n `Invalid filter predicate: top-level primitive values are not supported. Wrap the value in a field expression, e.g. 
{ \"kind\": ${actual} }`,\n );\n }\n\n if ('$not' in filter) {\n return targetQuery.andWhereNot(inner =>\n applyPredicateEntityFilterToQuery({\n filter: filter.$not,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n\n if ('$all' in filter) {\n if (filter.$all.length === 0) {\n return targetQuery.andWhereRaw('1 = 1');\n }\n return targetQuery.andWhere(outer => {\n for (const subFilter of filter.$all) {\n outer.andWhere(inner =>\n applyPredicateEntityFilterToQuery({\n filter: subFilter,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n });\n }\n\n if ('$any' in filter) {\n if (filter.$any.length === 0) {\n return targetQuery.andWhereRaw('1 = 0');\n }\n return targetQuery.andWhere(outer => {\n for (const subFilter of filter.$any) {\n outer.orWhere(inner =>\n applyPredicateEntityFilterToQuery({\n filter: subFilter,\n targetQuery: inner,\n onEntityIdField,\n knex,\n }),\n );\n }\n });\n }\n\n // Treat the filter as a field expression like { \"kind\": \"component\" } or { \"spec.type\": { \"$in\": [\"service\", \"website\"] } }\n if (Object.keys(filter).length === 0) {\n return targetQuery.andWhereRaw('1 = 1');\n }\n return targetQuery.andWhere(inner => {\n for (const [keyAnyCase, value] of Object.entries(filter)) {\n applyFieldCondition({\n key: keyAnyCase.toLocaleLowerCase('en-US'),\n value,\n targetQuery: inner,\n onEntityIdField,\n knex,\n });\n }\n });\n}\n\n/**\n * Applies a single { key: value } filter to the target query.\n */\nfunction applyFieldCondition(options: {\n key: string;\n value: FilterPredicateValue;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { key, value, targetQuery, onEntityIdField, knex } = options;\n\n if (isPrimitive(value)) {\n return targetQuery.whereExists(\n searchExists(knex, onEntityIdField)\n .where(`${S}.key`, key)\n .where(`${S}.value`, String(value).toLocaleLowerCase('en-US')),\n );\n }\n\n if (isObject(value)) {\n if ('$exists' in value) 
{\n const subquery = searchExists(knex, onEntityIdField).where(\n `${S}.key`,\n key,\n );\n return value.$exists\n ? targetQuery.whereExists(subquery)\n : targetQuery.whereNotExists(subquery);\n }\n\n if ('$in' in value) {\n const values = value.$in.map(v => String(v).toLocaleLowerCase('en-US'));\n return targetQuery.whereExists(\n searchExists(knex, onEntityIdField)\n .where(`${S}.key`, key)\n .whereIn(`${S}.value`, values),\n );\n }\n\n if ('$hasPrefix' in value) {\n const prefix = value.$hasPrefix.toLocaleLowerCase('en-US');\n const escaped = prefix.replace(/[%_\\\\]/g, c => `\\\\${c}`);\n return targetQuery.whereExists(\n searchExists(knex, onEntityIdField)\n .where(`${S}.key`, key)\n .andWhereRaw('?? like ? escape ?', [\n `${S}.value`,\n `${escaped}%`,\n '\\\\',\n ]),\n );\n }\n\n if ('$contains' in value) {\n const target = value.$contains;\n\n // If the target is a primitive, match on the special array syntax.\n //\n // FROM: `{ \"a\": { \"$contains\": \"b\" } }`\n //\n // TO: `{ \"a\": \"b\" }`\n //\n // The search table does not actually show us that \"a\" was an array to\n // begin with, so this can mistakenly also match on an object that had a\n // \"b\" key with a primitive value. 
We'll consider that an acceptable\n // tradeoff though.\n if (isPrimitive(target)) {\n return targetQuery.whereExists(\n searchExists(knex, onEntityIdField)\n .where(`${S}.key`, key)\n .where(`${S}.value`, String(target).toLocaleLowerCase('en-US')),\n );\n }\n\n // Object form of $contains - currently only supports relation-style\n // objects with \"type\" and optional \"targetRef\" keys.\n //\n // FROM: `{ \"relations\": { \"$contains\": { \"type\": \"ownedBy\", \"targetRef\": \"group:default/team-a\" } } }`\n //\n // TO: search for key = \"relations.ownedby\" AND value = \"group:default/team-a\"\n if (isObject(target)) {\n if (key === 'relations') {\n return applyContainsRelation({\n target,\n targetQuery,\n onEntityIdField,\n knex,\n });\n }\n\n throw new InputError(\n `Object form of $contains is not supported for field \"${key}\"`,\n );\n }\n\n const actual = JSON.stringify(target);\n throw new InputError(\n `Unsupported $contains target for field \"${key}\": ${actual}`,\n );\n }\n }\n\n const actual = JSON.stringify(value);\n throw new InputError(\n `Invalid filter predicate value for field \"${key}\": expected a primitive value, $exists, $in, $hasPrefix, or $contains operator, but got ${actual}`,\n );\n}\n\n/**\n * Handles expressions on the form\n *\n * ```\n * {\n * \"relations\": {\n * \"$contains\": {\n * \"type\": \"ownedBy\",\n * \"targetRef\": \"group:default/team-a\"\n * }\n * }\n * }\n * ```\n *\n * which map onto the search table's special `relation.<type>: <targetRef>`\n * syntax.\n *\n * Only the keys \"type\" and \"targetRef\" are supported. The \"type\" key is\n * required. If \"targetRef\" is omitted, it becomes an existence check for any\n * relation of that type. 
The \"targetRef\" value can be a string or an `$in`\n * array.\n */\nfunction applyContainsRelation(options: {\n target: Record<string, unknown>;\n targetQuery: Knex.QueryBuilder;\n onEntityIdField: string;\n knex: Knex;\n}): Knex.QueryBuilder {\n const { target: rawTarget, targetQuery, onEntityIdField, knex } = options;\n\n function parseStringOrIn(value: unknown): string[] {\n if (typeof value === 'string') {\n return [value.toLocaleLowerCase('en-US')];\n }\n if (\n isObject(value) &&\n Object.keys(value).length === 1 &&\n '$in' in value &&\n Array.isArray(value.$in) &&\n value.$in.every((v): v is string => typeof v === 'string')\n ) {\n if (value.$in.length === 0) {\n throw new InputError(\n `Empty \"$in\" array for $contains on \"relations\" is not allowed`,\n );\n }\n return value.$in.map(v => v.toLocaleLowerCase('en-US'));\n }\n const actual = JSON.stringify(value);\n throw new InputError(\n `Unsupported value in $contains for \"relations\": expected a string or { \"$in\": [strings] }, but got ${actual}`,\n );\n }\n\n let type: string | undefined;\n let targetRef: string[] | undefined;\n\n for (const [rawKey, value] of Object.entries(rawTarget)) {\n const key = rawKey.toLocaleLowerCase('en-US');\n\n if (key === 'type') {\n if (type !== undefined) {\n throw new InputError(\n `Duplicate key \"${rawKey}\" in $contains for \"relations\"`,\n );\n }\n if (typeof value !== 'string') {\n throw new InputError(\n `The $contains operator for \"relations\" requires a \"type\" string property`,\n );\n }\n type = value;\n } else if (key === 'targetref') {\n if (targetRef !== undefined) {\n throw new InputError(\n `Duplicate key \"${rawKey}\" in $contains for \"relations\"`,\n );\n }\n targetRef = parseStringOrIn(value);\n } else {\n throw new InputError(\n `Unsupported key \"${rawKey}\" in $contains for \"relations\". 
Only \"type\" and \"targetRef\" are supported`,\n );\n }\n }\n\n if (!type) {\n throw new InputError(\n `The $contains operator for \"relations\" requires a \"type\" string property`,\n );\n }\n\n const subquery = searchExists(knex, onEntityIdField).where(\n `${S}.key`,\n `relations.${type.toLocaleLowerCase('en-US')}`,\n );\n\n if (targetRef) {\n subquery.whereIn(`${S}.value`, targetRef);\n }\n\n return targetQuery.whereExists(subquery);\n}\n"],"names":["InputError","searchExists","S","actual"],"mappings":";;;;;AAyBA,SAAS,YAAY,KAAA,EAAmD;AACtE,EAAA,OACE,OAAO,KAAA,KAAU,QAAA,IACjB,OAAO,KAAA,KAAU,QAAA,IACjB,OAAO,KAAA,KAAU,SAAA;AAErB;AAEA,SAAS,SAAS,KAAA,EAAkD;AAClE,EAAA,OAAO,OAAO,UAAU,QAAA,IAAY,KAAA,KAAU,QAAQ,CAAC,KAAA,CAAM,QAAQ,KAAK,CAAA;AAC5E;AAEO,SAAS,kCAAkC,OAAA,EAK5B;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAGvD,EAAA,IAAI,CAAC,QAAA,CAAS,MAAM,CAAA,EAAG;AACrB,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AACpC,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,gIAAgI,MAAM,CAAA,EAAA;AAAA,KACxI;AAAA,EACF;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,OAAO,WAAA,CAAY,WAAA;AAAA,MAAY,WAC7B,iCAAA,CAAkC;AAAA,QAChC,QAAQ,MAAA,CAAO,IAAA;AAAA,QACf,WAAA,EAAa,KAAA;AAAA,QACb,eAAA;AAAA,QACA;AAAA,OACD;AAAA,KACH;AAAA,EACF;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,KAAW,CAAA,EAAG;AAC5B,MAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,IACxC;AACA,IAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,MAAA,KAAA,MAAW,SAAA,IAAa,OAAO,IAAA,EAAM;AACnC,QAAA,KAAA,CAAM,QAAA;AAAA,UAAS,WACb,iCAAA,CAAkC;AAAA,YAChC,MAAA,EAAQ,SAAA;AAAA,YACR,WAAA,EAAa,KAAA;AAAA,YACb,eAAA;AAAA,YACA;AAAA,WACD;AAAA,SACH;AAAA,MACF;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAEA,EAAA,IAAI,UAAU,MAAA,EAAQ;AACpB,IAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAA,KAAW,CAAA,EAAG;AAC5B,MAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,IACxC;AACA,IAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,MAAA,KAAA,MAAW,SAAA,IAAa,OAAO,IAAA,EAAM;AACnC,QAAA,KAAA,CAAM,OAAA;AAAA,UAAQ,WACZ,iCAAA,CAAkC;AAAA,YAChC,MAAA,EAAQ,SAAA;AAAA,YACR,WAAA,EAAa,KAAA;AAAA,YACb,eAAA;AAAA,
YACA;AAAA,WACD;AAAA,SACH;AAAA,MACF;AAAA,IACF,CAAC,CAAA;AAAA,EACH;AAGA,EAAA,IAAI,MAAA,CAAO,IAAA,CAAK,MAAM,CAAA,CAAE,WAAW,CAAA,EAAG;AACpC,IAAA,OAAO,WAAA,CAAY,YAAY,OAAO,CAAA;AAAA,EACxC;AACA,EAAA,OAAO,WAAA,CAAY,SAAS,CAAA,KAAA,KAAS;AACnC,IAAA,KAAA,MAAW,CAAC,UAAA,EAAY,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,EAAG;AACxD,MAAA,mBAAA,CAAoB;AAAA,QAClB,GAAA,EAAK,UAAA,CAAW,iBAAA,CAAkB,OAAO,CAAA;AAAA,QACzC,KAAA;AAAA,QACA,WAAA,EAAa,KAAA;AAAA,QACb,eAAA;AAAA,QACA;AAAA,OACD,CAAA;AAAA,IACH;AAAA,EACF,CAAC,CAAA;AACH;AAKA,SAAS,oBAAoB,OAAA,EAMP;AACpB,EAAA,MAAM,EAAE,GAAA,EAAK,KAAA,EAAO,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAE3D,EAAA,IAAI,WAAA,CAAY,KAAK,CAAA,EAAG;AACtB,IAAA,OAAO,WAAA,CAAY,WAAA;AAAA,MACjBC,4BAAa,IAAA,EAAM,eAAe,EAC/B,KAAA,CAAM,CAAA,EAAGC,+BAAC,CAAA,IAAA,CAAA,EAAQ,GAAG,EACrB,KAAA,CAAM,CAAA,EAAGA,+BAAC,CAAA,MAAA,CAAA,EAAU,MAAA,CAAO,KAAK,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC;AAAA,KACjE;AAAA,EACF;AAEA,EAAA,IAAI,QAAA,CAAS,KAAK,CAAA,EAAG;AACnB,IAAA,IAAI,aAAa,KAAA,EAAO;AACtB,MAAA,MAAM,QAAA,GAAWD,2BAAA,CAAa,IAAA,EAAM,eAAe,CAAA,CAAE,KAAA;AAAA,QACnD,GAAGC,+BAAC,CAAA,IAAA,CAAA;AAAA,QACJ;AAAA,OACF;AACA,MAAA,OAAO,KAAA,CAAM,UACT,WAAA,CAAY,WAAA,CAAY,QAAQ,CAAA,GAChC,WAAA,CAAY,eAAe,QAAQ,CAAA;AAAA,IACzC;AAEA,IAAA,IAAI,SAAS,KAAA,EAAO;AAClB,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,GAAA,CAAI,GAAA,CAAI,CAAA,CAAA,KAAK,OAAO,CAAC,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC,CAAA;AACtE,MAAA,OAAO,WAAA,CAAY,WAAA;AAAA,QACjBD,2BAAA,CAAa,IAAA,EAAM,eAAe,CAAA,CAC/B,MAAM,CAAA,EAAGC,+BAAC,CAAA,IAAA,CAAA,EAAQ,GAAG,CAAA,CACrB,OAAA,CAAQ,CAAA,EAAGA,+BAAC,UAAU,MAAM;AAAA,OACjC;AAAA,IACF;AAEA,IAAA,IAAI,gBAAgB,KAAA,EAAO;AACzB,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,UAAA,CAAW,iBAAA,CAAkB,OAAO,CAAA;AACzD,MAAA,MAAM,UAAU,MAAA,CAAO,OAAA,CAAQ,WAAW,CAAA,CAAA,KAAK,CAAA,EAAA,EAAK,CAAC,CAAA,CAAE,CAAA;AACvD,MAAA,OAAO,WAAA,CAAY,WAAA;AAAA,QACjBD,2BAAA,CAAa,IAAA,EAAM,eAAe,CAAA,CAC/B,KAAA,CAAM,CAAA,EAAGC,+BAAC,CAAA,IAAA,CAAA,EAAQ,GAAG,CAAA,CACrB,WAAA,CAAY,oBAAA,EAAsB;AAAA,UACjC,GAAGA,+BAAC,CAAA,MAAA,CAAA;AAAA,UACJ,GAAG,OAAO,CAAA,CAAA,CAAA;AAAA,UACV;AAAA,SACD;AAAA,OACL;AAAA,
IACF;AAEA,IAAA,IAAI,eAAe,KAAA,EAAO;AACxB,MAAA,MAAM,SAAS,KAAA,CAAM,SAAA;AAYrB,MAAA,IAAI,WAAA,CAAY,MAAM,CAAA,EAAG;AACvB,QAAA,OAAO,WAAA,CAAY,WAAA;AAAA,UACjBD,4BAAa,IAAA,EAAM,eAAe,EAC/B,KAAA,CAAM,CAAA,EAAGC,+BAAC,CAAA,IAAA,CAAA,EAAQ,GAAG,EACrB,KAAA,CAAM,CAAA,EAAGA,+BAAC,CAAA,MAAA,CAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC;AAAA,SAClE;AAAA,MACF;AAQA,MAAA,IAAI,QAAA,CAAS,MAAM,CAAA,EAAG;AACpB,QAAA,IAAI,QAAQ,WAAA,EAAa;AACvB,UAAA,OAAO,qBAAA,CAAsB;AAAA,YAC3B,MAAA;AAAA,YACA,WAAA;AAAA,YACA,eAAA;AAAA,YACA;AAAA,WACD,CAAA;AAAA,QACH;AAEA,QAAA,MAAM,IAAIF,iBAAA;AAAA,UACR,wDAAwD,GAAG,CAAA,CAAA;AAAA,SAC7D;AAAA,MACF;AAEA,MAAA,MAAMG,OAAAA,GAAS,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AACpC,MAAA,MAAM,IAAIH,iBAAA;AAAA,QACR,CAAA,wCAAA,EAA2C,GAAG,CAAA,GAAA,EAAMG,OAAM,CAAA;AAAA,OAC5D;AAAA,IACF;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,KAAK,CAAA;AACnC,EAAA,MAAM,IAAIH,iBAAA;AAAA,IACR,CAAA,0CAAA,EAA6C,GAAG,CAAA,wFAAA,EAA2F,MAAM,CAAA;AAAA,GACnJ;AACF;AAwBA,SAAS,sBAAsB,OAAA,EAKT;AACpB,EAAA,MAAM,EAAE,MAAA,EAAQ,SAAA,EAAW,WAAA,EAAa,eAAA,EAAiB,MAAK,GAAI,OAAA;AAElE,EAAA,SAAS,gBAAgB,KAAA,EAA0B;AACjD,IAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,MAAA,OAAO,CAAC,KAAA,CAAM,iBAAA,CAAkB,OAAO,CAAC,CAAA;AAAA,IAC1C;AACA,IAAA,IACE,QAAA,CAAS,KAAK,CAAA,IACd,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA,CAAE,MAAA,KAAW,CAAA,IAC9B,KAAA,IAAS,KAAA,IACT,KAAA,CAAM,QAAQ,KAAA,CAAM,GAAG,CAAA,IACvB,KAAA,CAAM,GAAA,CAAI,KAAA,CAAM,CAAC,CAAA,KAAmB,OAAO,CAAA,KAAM,QAAQ,CAAA,EACzD;AACA,MAAA,IAAI,KAAA,CAAM,GAAA,CAAI,MAAA,KAAW,CAAA,EAAG;AAC1B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,CAAA,6DAAA;AAAA,SACF;AAAA,MACF;AACA,MAAA,OAAO,MAAM,GAAA,CAAI,GAAA,CAAI,OAAK,CAAA,CAAE,iBAAA,CAAkB,OAAO,CAAC,CAAA;AAAA,IACxD;AACA,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,SAAA,CAAU,KAAK,CAAA;AACnC,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,sGAAsG,MAAM,CAAA;AAAA,KAC9G;AAAA,EACF;AAEA,EAAA,IAAI,IAAA;AACJ,EAAA,IAAI,SAAA;AAEJ,EAAA,KAAA,MAAW,CAAC,MAAA,EAAQ,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,SAAS,CAAA,EAAG;AACvD,IAAA,MAAM,GAAA,GAAM,MAAA,CAAO,iBAAA,CAAkB,OAAO,CAAA;AAE5C,IAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,MAAA,IAAI,SA
AS,MAAA,EAAW;AACtB,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,kBAAkB,MAAM,CAAA,8BAAA;AAAA,SAC1B;AAAA,MACF;AACA,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,CAAA,wEAAA;AAAA,SACF;AAAA,MACF;AACA,MAAA,IAAA,GAAO,KAAA;AAAA,IACT,CAAA,MAAA,IAAW,QAAQ,WAAA,EAAa;AAC9B,MAAA,IAAI,cAAc,MAAA,EAAW;AAC3B,QAAA,MAAM,IAAIA,iBAAA;AAAA,UACR,kBAAkB,MAAM,CAAA,8BAAA;AAAA,SAC1B;AAAA,MACF;AACA,MAAA,SAAA,GAAY,gBAAgB,KAAK,CAAA;AAAA,IACnC,CAAA,MAAO;AACL,MAAA,MAAM,IAAIA,iBAAA;AAAA,QACR,oBAAoB,MAAM,CAAA,yEAAA;AAAA,OAC5B;AAAA,IACF;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,IAAA,EAAM;AACT,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,CAAA,wEAAA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,MAAM,QAAA,GAAWC,2BAAA,CAAa,IAAA,EAAM,eAAe,CAAA,CAAE,KAAA;AAAA,IACnD,GAAGC,+BAAC,CAAA,IAAA,CAAA;AAAA,IACJ,CAAA,UAAA,EAAa,IAAA,CAAK,iBAAA,CAAkB,OAAO,CAAC,CAAA;AAAA,GAC9C;AAEA,EAAA,IAAI,SAAA,EAAW;AACb,IAAA,QAAA,CAAS,OAAA,CAAQ,CAAA,EAAGA,+BAAC,CAAA,MAAA,CAAA,EAAU,SAAS,CAAA;AAAA,EAC1C;AAEA,EAAA,OAAO,WAAA,CAAY,YAAY,QAAQ,CAAA;AACzC;;;;"}
@@ -0,0 +1,10 @@
1
+ 'use strict';
2
+
3
+ const SEARCH_FLT_ALIAS = "search_flt";
4
+ function searchExists(knex, onEntityIdField) {
5
+ return knex(`search as ${SEARCH_FLT_ALIAS}`).select(knex.raw("1")).whereRaw("?? = ??", [`${SEARCH_FLT_ALIAS}.entity_id`, onEntityIdField]);
6
+ }
7
+
8
+ exports.SEARCH_FLT_ALIAS = SEARCH_FLT_ALIAS;
9
+ exports.searchExists = searchExists;
10
+ //# sourceMappingURL=searchSubquery.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"searchSubquery.cjs.js","sources":["../../../src/service/request/searchSubquery.ts"],"sourcesContent":["/*\n * Copyright 2026 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Knex } from 'knex';\n\n/**\n * Alias used for the search table in EXISTS subqueries, to avoid ambiguity\n * when the outer query is also on the search table (e.g. facets queries).\n */\nexport const SEARCH_FLT_ALIAS = 'search_flt';\n\n/**\n * Creates an EXISTS subquery base against the search table, correlated on\n * entity_id with the outer query's entity id field.\n */\nexport function searchExists(\n knex: Knex,\n onEntityIdField: string,\n): Knex.QueryBuilder {\n return knex(`search as ${SEARCH_FLT_ALIAS}`)\n .select(knex.raw('1'))\n .whereRaw('?? = ??', [`${SEARCH_FLT_ALIAS}.entity_id`, onEntityIdField]);\n}\n"],"names":[],"mappings":";;AAsBO,MAAM,gBAAA,GAAmB;AAMzB,SAAS,YAAA,CACd,MACA,eAAA,EACmB;AACnB,EAAA,OAAO,KAAK,CAAA,UAAA,EAAa,gBAAgB,EAAE,CAAA,CACxC,MAAA,CAAO,KAAK,GAAA,CAAI,GAAG,CAAC,CAAA,CACpB,SAAS,SAAA,EAAW,CAAC,GAAG,gBAAgB,CAAA,UAAA,CAAA,EAAc,eAAe,CAAC,CAAA;AAC3E;;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@backstage/plugin-catalog-backend",
3
- "version": "3.5.1-next.0",
3
+ "version": "3.5.1-next.1",
4
4
  "description": "The Backstage backend plugin that provides the Backstage catalog",
5
5
  "backstage": {
6
6
  "role": "backend-plugin",
@@ -76,8 +76,8 @@
76
76
  "test": "backstage-cli package test"
77
77
  },
78
78
  "dependencies": {
79
- "@backstage/backend-openapi-utils": "0.6.8-next.0",
80
- "@backstage/backend-plugin-api": "1.8.1-next.0",
79
+ "@backstage/backend-openapi-utils": "0.6.8-next.1",
80
+ "@backstage/backend-plugin-api": "1.9.0-next.1",
81
81
  "@backstage/catalog-client": "1.14.0",
82
82
  "@backstage/catalog-model": "1.7.7",
83
83
  "@backstage/config": "1.3.6",
@@ -85,10 +85,10 @@
85
85
  "@backstage/filter-predicates": "0.1.1",
86
86
  "@backstage/integration": "2.0.0",
87
87
  "@backstage/plugin-catalog-common": "1.1.8",
88
- "@backstage/plugin-catalog-node": "2.1.1-next.0",
89
- "@backstage/plugin-events-node": "0.4.21-next.0",
88
+ "@backstage/plugin-catalog-node": "2.1.1-next.1",
89
+ "@backstage/plugin-events-node": "0.4.21-next.1",
90
90
  "@backstage/plugin-permission-common": "0.9.7",
91
- "@backstage/plugin-permission-node": "0.10.12-next.0",
91
+ "@backstage/plugin-permission-node": "0.10.12-next.1",
92
92
  "@backstage/types": "1.2.2",
93
93
  "@opentelemetry/api": "^1.9.0",
94
94
  "codeowners-utils": "^1.0.2",
@@ -97,7 +97,7 @@
97
97
  "fast-json-stable-stringify": "^2.1.0",
98
98
  "fs-extra": "^11.2.0",
99
99
  "git-url-parse": "^15.0.0",
100
- "glob": "^7.1.6",
100
+ "glob": "^13.0.0",
101
101
  "knex": "^3.0.0",
102
102
  "lodash": "^4.17.21",
103
103
  "luxon": "^3.0.0",
@@ -111,15 +111,14 @@
111
111
  "zod-validation-error": "^4.0.2"
112
112
  },
113
113
  "devDependencies": {
114
- "@backstage/backend-defaults": "0.16.1-next.0",
115
- "@backstage/backend-test-utils": "1.11.2-next.0",
116
- "@backstage/cli": "0.36.1-next.0",
114
+ "@backstage/backend-defaults": "0.16.1-next.1",
115
+ "@backstage/backend-test-utils": "1.11.2-next.1",
116
+ "@backstage/cli": "0.36.1-next.1",
117
117
  "@backstage/plugin-permission-common": "0.9.7",
118
- "@backstage/repo-tools": "0.17.1-next.0",
118
+ "@backstage/repo-tools": "0.17.1-next.1",
119
119
  "@types/core-js": "^2.5.4",
120
120
  "@types/express": "^4.17.6",
121
121
  "@types/git-url-parse": "^9.0.0",
122
- "@types/glob": "^8.0.0",
123
122
  "@types/lodash": "^4.14.151",
124
123
  "@types/supertest": "^2.0.8",
125
124
  "better-sqlite3": "^12.0.0",