@sweetoburrito/backstage-plugin-ai-assistant-backend 0.0.0-snapshot-20260108133622 → 0.0.0-snapshot-20260109124956

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/database/pg-vector-store.cjs.js CHANGED
@@ -1,7 +1,6 @@
  'use strict';

  var crypto = require('crypto');
- var uuid = require('uuid');

  class PgVectorStore {
  // Seconds in a day for timestamp conversion
@@ -115,9 +114,8 @@ class PgVectorStore {
  const hash = crypto.createHash("sha256").update(doc.content).digest("hex");
  return {
  hash,
- id: doc.id ?? uuid.v4(),
  metadata: doc.metadata,
- lastUpdated: /* @__PURE__ */ new Date(),
+ lastUpdated: doc.lastUpdated ?? /* @__PURE__ */ new Date(),
  content: doc.content.replace(/\0/g, ""),
  vector: `[${vector.join(",")}]`
  };
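
Taken together, the two hunks above change how `addDocuments` builds rows: the `uuid`-generated `id` is gone (the `uuid` dependency was dropped, leaving id assignment to the database), and `lastUpdated` now prefers the timestamp carried on the incoming document before falling back to "now". A minimal sketch of the new row shape, using a hypothetical `Doc` type standing in for `EmbeddingDocument`:

```typescript
import { createHash } from 'crypto';

// Hypothetical minimal shape mirroring EmbeddingDocument in this diff.
type Doc = {
  content: string;
  metadata: Record<string, unknown>;
  lastUpdated?: Date;
};

// Sketch of the row built per document after this change: no uuid-generated
// id, and lastUpdated falls back to the current time only when the source
// document carries no timestamp of its own.
function buildRow(doc: Doc, vector: number[]) {
  return {
    hash: createHash('sha256').update(doc.content).digest('hex'),
    metadata: doc.metadata,
    lastUpdated: doc.lastUpdated ?? new Date(),
    content: doc.content.replace(/\0/g, ''),
    vector: `[${vector.join(',')}]`,
  };
}
```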
package/dist/database/pg-vector-store.cjs.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"pg-vector-store.cjs.js","sources":["../../src/database/pg-vector-store.ts"],"sourcesContent":[…],"names":["createHash","uuid"],"mappings":"…"}
+ {"version":3,"file":"pg-vector-store.cjs.js","sources":["../../src/database/pg-vector-store.ts"],"sourcesContent":[…],"names":["createHash"],"mappings":"…"}
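
The TypeScript embedded in this map's `sourcesContent` shows why the timestamp fallback matters: `similaritySearchVectorWithScore` orders rows ascending by `(distance * 0.7) - (exp(-0.693 * ageDays / 180) * 0.3)`. A small TypeScript rendering of that SQL expression, as an illustration of the shipped query's scoring rather than code from the package:

```typescript
// Constants as declared in PgVectorStore.
const SIMILARITY_WEIGHT = 0.7;
const RECENCY_WEIGHT = 0.3;
const RECENCY_HALF_LIFE_DAYS = 180;

// Mirrors the "_combined_score" computed in the shipped SQL: lower is
// better, and -0.693 ≈ ln(1/2) halves the recency boost every half-life.
function combinedScore(distance: number, ageDays: number): number {
  const recencyBoost =
    Math.exp((-0.693 * ageDays) / RECENCY_HALF_LIFE_DAYS) * RECENCY_WEIGHT;
  return distance * SIMILARITY_WEIGHT - recencyBoost;
}

// A fresh document gets the full 0.3 boost; a 180-day-old one gets ~0.15.
console.log(combinedScore(0.4, 0));   // ≈ -0.02
console.log(combinedScore(0.4, 180)); // ≈ 0.13
```

Before this release every row was stamped with its ingestion time, so `ageDays` was effectively zero for anything just re-ingested; preserving `doc.lastUpdated` lets genuinely stale sources decay in the ranking.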
package/dist/services/ingestor.cjs.js CHANGED
@@ -54,7 +54,8 @@ const createDataIngestionPipeline = ({
  const docChunks = chunks.flatMap(
  (chunk, i) => ({
  metadata: { ...document.metadata, chunk: String(i) },
- content: chunk
+ content: chunk,
+ lastUpdated: document.lastUpdated
  })
  );
  return docChunks;
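
This is the producer side of the same change: every chunk emitted by the text splitter now inherits its parent document's `lastUpdated`, so the vector store's fallback above rarely has to invent a timestamp. A sketch of the chunking step under the same assumed `Doc` shape as earlier (the real type is `EmbeddingDocument` from `@sweetoburrito/backstage-plugin-ai-assistant-node`):

```typescript
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';

// Hypothetical document shape for illustration.
type Doc = {
  content: string;
  metadata: Record<string, unknown>;
  lastUpdated?: Date;
};

// Splits a document and, per this diff, forwards lastUpdated onto each chunk.
async function chunkDocument(document: Doc): Promise<Doc[]> {
  const splitter = new RecursiveCharacterTextSplitter({
    chunkSize: 1000,   // DEFAULT_CHUNK_SIZE in this package
    chunkOverlap: 100, // DEFAULT_CHUNK_OVERLAP in this package
  });
  const chunks = await splitter.splitText(document.content);
  return chunks.map((chunk, i) => ({
    metadata: { ...document.metadata, chunk: String(i) },
    content: chunk,
    lastUpdated: document.lastUpdated,
  }));
}
```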
package/dist/services/ingestor.cjs.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ingestor.cjs.js","sources":["../../src/services/ingestor.ts"],"sourcesContent":["… content: chunk, …"],"names":["readSchedulerServiceTaskScheduleDefinitionFromConfig","documents","RecursiveCharacterTextSplitter"],"mappings":"…"}
+ {"version":3,"file":"ingestor.cjs.js","sources":["../../src/services/ingestor.ts"],"sourcesContent":["… content: chunk,\n lastUpdated: document.lastUpdated, …"],"names":["readSchedulerServiceTaskScheduleDefinitionFromConfig","documents","RecursiveCharacterTextSplitter"],"mappings":"…"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@sweetoburrito/backstage-plugin-ai-assistant-backend",
- "version": "0.0.0-snapshot-20260108133622",
+ "version": "0.0.0-snapshot-20260109124956",
  "license": "Apache-2.0",
  "main": "dist/index.cjs.js",
  "types": "dist/index.d.ts",
@@ -44,7 +44,7 @@
  "@langchain/mcp-adapters": "^1.0.0",
  "@langchain/textsplitters": "^0.1.0",
  "@sweetoburrito/backstage-plugin-ai-assistant-common": "^0.8.0",
- "@sweetoburrito/backstage-plugin-ai-assistant-node": "0.0.0-snapshot-20260108133622",
+ "@sweetoburrito/backstage-plugin-ai-assistant-node": "^0.10.0",
  "express": "^4.17.1",
  "express-promise-router": "^4.1.0",
  "knex": "^3.1.0",