@sweetoburrito/backstage-plugin-ai-assistant-backend 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,28 @@
+ # ai-assistant
+
+ This plugin provides the backend for the AI assistant: chat endpoints, a pgvector-backed vector store for embeddings, and a scheduled data-ingestion pipeline.
+
+ ## Installation
+
+ This plugin is installed via the `@sweetoburrito/backstage-plugin-ai-assistant-backend` package. To install it in your backend package, run the following command:
+
+ ```bash
+ # From your root directory
+ yarn --cwd packages/backend add @sweetoburrito/backstage-plugin-ai-assistant-backend
+ ```
+
+ Then add the plugin to your backend in `packages/backend/src/index.ts`:
+
+ ```ts
+ const backend = createBackend();
+ // ...
+ backend.add(import('@sweetoburrito/backstage-plugin-ai-assistant-backend'));
+ ```
+
+ ## Development
+
+ This plugin backend can be started in standalone mode directly from this
+ package with `yarn start`. That is a limited setup, most convenient when
+ developing the plugin backend itself.
+
+ If you want to run the entire project, including the frontend, run `yarn start` from the root directory.
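
Note that the backend will not start without an embeddings provider, and it cannot answer prompts without at least one registered model (see `plugin.cjs.js` and `chat.cjs.js` below). A minimal sketch of a companion backend module, assuming the extension-point shapes implied by this package's compiled output; the `@langchain/openai` classes and the module/model ids are illustrative only:

```ts
import { createBackendModule } from '@backstage/backend-plugin-api';
import {
  embeddingsProviderExtensionPoint,
  modelProviderExtensionPoint,
} from '@sweetoburrito/backstage-plugin-ai-assistant-node';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';

// Hypothetical module that supplies the embeddings provider and one chat model
// consumed by the ai-assistant backend plugin.
export const aiAssistantOpenAiModule = createBackendModule({
  pluginId: 'ai-assistant',
  moduleId: 'openai',
  register(env) {
    env.registerInit({
      deps: {
        embeddings: embeddingsProviderExtensionPoint,
        models: modelProviderExtensionPoint,
      },
      async init({ embeddings, models }) {
        // getEmbeddings() is the only method the plugin calls on the provider.
        embeddings.register({ getEmbeddings: async () => new OpenAIEmbeddings() });
        // chat.cjs.js looks models up by id and calls chatModel.invoke(...).
        models.register({
          id: 'gpt-4o-mini',
          chatModel: new ChatOpenAI({ model: 'gpt-4o-mini' }),
        });
      },
    });
  },
});
```

Such a module would be added to the backend alongside the plugin, e.g. `backend.add(aiAssistantOpenAiModule)`.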
package/config.d.ts ADDED
@@ -0,0 +1,28 @@
+ import { HumanDuration } from '@backstage/types';
+ import { SchedulerServiceTaskScheduleDefinitionConfig } from '@backstage/backend-plugin-api';
+
+ export interface Config {
+   aiAssistant: {
+     prompt?: {
+       system?: string;
+       prefix?: string;
+       suffix?: string;
+     };
+     storage?: {
+       pgVector?: {
+         /**
+          * Number of rows per batch when flushing embeddings to the database (defaults to 500)
+          */
+         chunkSize?: number;
+
+         /**
+          * Default number of embedding documents to return from a similarity search (defaults to 4)
+          */
+         amount?: number;
+       };
+     };
+     ingestion?: {
+       schedule?: SchedulerServiceTaskScheduleDefinitionConfig;
+     };
+   };
+ }
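
For reference, this schema maps onto `app-config.yaml` roughly as follows; every key is optional, the values are illustrative, and the schedule uses the standard Backstage `SchedulerServiceTaskScheduleDefinitionConfig` shape:

```yaml
aiAssistant:
  prompt:
    system: 'You are the assistant for our internal developer portal.'
  storage:
    pgVector:
      chunkSize: 500 # rows per batch insert when storing embeddings
      amount: 4 # default number of documents returned by similarity search
  ingestion:
    schedule:
      frequency: { hours: 24 }
      timeout: { hours: 3 }
```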
@@ -0,0 +1,46 @@
+ 'use strict';
+
+ const TABLE_NAME = "conversation";
+ class ChatStore {
+   /**
+    * Creates an instance of ChatStore.
+    * @param client - The Knex client to interact with the PostgreSQL database.
+    */
+   constructor(client) {
+     this.client = client;
+   }
+   static async fromConfig({ database }) {
+     const client = await database.getClient();
+     return new ChatStore(client);
+   }
+   table() {
+     return this.client(TABLE_NAME);
+   }
+   async getChatMessages(conversationId, userRef, limit) {
+     let query = this.table().where({ conversation_id: conversationId, userRef }).select("*");
+     if (typeof limit === "number") {
+       query = query.limit(limit).orderBy("created_at", "desc");
+     }
+     const rows = await query;
+     const chatMessages = rows.map((row) => ({
+       role: row.role,
+       content: row.content,
+       id: row.id
+     }));
+     return chatMessages;
+   }
+   async addChatMessage(messages, userRef, conversationId) {
+     const rows = messages.map((msg) => ({
+       id: msg.id,
+       conversation_id: conversationId,
+       role: msg.role,
+       content: msg.content,
+       userRef,
+       created_at: this.client.fn.now()
+     }));
+     await this.table().insert(rows);
+   }
+ }
+
+ exports.ChatStore = ChatStore;
+ //# sourceMappingURL=chat-store.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"chat-store.cjs.js","sources":["../../src/database/chat-store.ts"],"sourcesContent":["import { DatabaseService } from '@backstage/backend-plugin-api';\nimport { Message } from '@sweetoburrito/backstage-plugin-ai-assistant-node';\n\nimport { Knex } from 'knex';\n\nconst TABLE_NAME = 'conversation';\n\nexport type ChatStoreOptions = {\n database: DatabaseService;\n};\n\nexport class ChatStore {\n /**\n * Creates an instance of ChatStore.\n * @param client - The Knex client to interact with the PostgreSQL database.\n */\n constructor(private readonly client: Knex) {}\n\n static async fromConfig({ database }: ChatStoreOptions) {\n const client = await database.getClient();\n return new ChatStore(client);\n }\n\n table() {\n return this.client(TABLE_NAME);\n }\n\n async getChatMessages(\n conversationId: string,\n userRef: string,\n limit?: number,\n ): Promise<Required<Message>[]> {\n let query = this.table()\n .where({ conversation_id: conversationId, userRef })\n .select('*');\n\n if (typeof limit === 'number') {\n query = query.limit(limit).orderBy('created_at', 'desc');\n }\n\n const rows = await query;\n\n const chatMessages: Required<Message>[] = rows.map(row => ({\n role: row.role,\n content: row.content,\n id: row.id,\n }));\n\n return chatMessages;\n }\n\n async addChatMessage(\n messages: Message[],\n userRef: string,\n conversationId: string,\n ): Promise<void> {\n const rows = messages.map(msg => ({\n id: msg.id,\n conversation_id: conversationId,\n role: msg.role,\n content: msg.content,\n userRef,\n created_at: this.client.fn.now(),\n }));\n\n await this.table().insert(rows);\n }\n}\n"],"names":[],"mappings":";;AAKA,MAAM,UAAA,GAAa,cAAA;AAMZ,MAAM,SAAA,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAKrB,YAA6B,MAAA,EAAc;AAAd,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAAA,EAAe;AAAA,EAE5C,aAAa,UAAA,CAAW,EAAE,QAAA,EAAS,EAAqB;AACtD,IAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,SAAA,EAAU;AACxC,IAAA,OAAO,IAAI,UAAU,MAAM,CAAA;AAAA,EAC7B;AAAA,EAEA,KAAA,GAAQ;AACN,IAAA,OAAO,IAAA,CAAK,OAAO,UAAU,CAAA;AAAA,EAC/B;AAAA,EAEA,MAAM,eAAA,CACJ,cAAA,EACA,OAAA,EACA,KAAA,EAC8B;AAC9B,IAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,KAAA,EAAM,CACpB,KAAA,CAAM,EAAE,eAAA,EAAiB,cAAA,EAAgB,OAAA,EAAS,CAAA,CAClD,MAAA,CAAO,GAAG,CAAA;AAEb,IAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,MAAA,KAAA,GAAQ,MAAM,KAAA,CAAM,KAAK,CAAA,CAAE,OAAA,CAAQ,cAAc,MAAM,CAAA;AAAA,IACzD;AAEA,IAAA,MAAM,OAAO,MAAM,KAAA;AAEnB,IAAA,MAAM,YAAA,GAAoC,IAAA,CAAK,GAAA,CAAI,CAAA,GAAA,MAAQ;AAAA,MACzD,MAAM,GAAA,CAAI,IAAA;AAAA,MACV,SAAS,GAAA,CAAI,OAAA;AAAA,MACb,IAAI,GAAA,CAAI;AAAA,KACV,CAAE,CAAA;AAEF,IAAA,OAAO,YAAA;AAAA,EACT;AAAA,EAEA,MAAM,cAAA,CACJ,QAAA,EACA,OAAA,EACA,cAAA,EACe;AACf,IAAA,MAAM,IAAA,GAAO,QAAA,CAAS,GAAA,CAAI,CAAA,GAAA,MAAQ;AAAA,MAChC,IAAI,GAAA,CAAI,EAAA;AAAA,MACR,eAAA,EAAiB,cAAA;AAAA,MACjB,MAAM,GAAA,CAAI,IAAA;AAAA,MACV,SAAS,GAAA,CAAI,OAAA;AAAA,MACb,OAAA;AAAA,MACA,UAAA,EAAY,IAAA,CAAK,MAAA,CAAO,EAAA,CAAG,GAAA;AAAI,KACjC,CAAE,CAAA;AAEF,IAAA,MAAM,IAAA,CAAK,KAAA,EAAM,CAAE,MAAA,CAAO,IAAI,CAAA;AAAA,EAChC;AACF;;;;"}
@@ -0,0 +1,16 @@
+ 'use strict';
+
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+
+ async function applyDatabaseMigrations(knex) {
+   const migrationsDir = backendPluginApi.resolvePackagePath(
+     "@sweetoburrito/backstage-plugin-ai-assistant-backend",
+     "migrations"
+   );
+   await knex.migrate.latest({
+     directory: migrationsDir
+   });
+ }
+
+ exports.applyDatabaseMigrations = applyDatabaseMigrations;
+ //# sourceMappingURL=migrations.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"migrations.cjs.js","sources":["../../src/database/migrations.ts"],"sourcesContent":["import { resolvePackagePath } from '@backstage/backend-plugin-api';\nimport { Knex } from 'knex';\n\nexport async function applyDatabaseMigrations(knex: Knex): Promise<void> {\n const migrationsDir = resolvePackagePath(\n '@sweetoburrito/backstage-plugin-ai-assistant-backend',\n 'migrations',\n );\n\n await knex.migrate.latest({\n directory: migrationsDir,\n });\n}\n"],"names":["resolvePackagePath"],"mappings":";;;;AAGA,eAAsB,wBAAwB,IAAA,EAA2B;AACvE,EAAA,MAAM,aAAA,GAAgBA,mCAAA;AAAA,IACpB,sDAAA;AAAA,IACA;AAAA,GACF;AAEA,EAAA,MAAM,IAAA,CAAK,QAAQ,MAAA,CAAO;AAAA,IACxB,SAAA,EAAW;AAAA,GACZ,CAAA;AACH;;;;"}
@@ -0,0 +1,193 @@
+ 'use strict';
+
+ class PgVectorStore {
+   /**
+    * Creates an instance of PgVectorStore.
+    * @param client - The Knex client to interact with the PostgreSQL database.
+    * @param logger - The logger used for diagnostics.
+    * @param [amount=4] - The default number of documents to return from a similarity search.
+    * @param [chunkSize=500] - The number of rows per batch when inserting embeddings.
+    */
+   constructor(client, logger, amount = 4, chunkSize = 500) {
+     this.client = client;
+     this.logger = logger;
+     this.amount = amount;
+     this.chunkSize = chunkSize;
+   }
+   tableName = "embeddings";
+   embeddings;
+   static async fromConfig({ config, database, logger }) {
+     const client = await database.getClient();
+     const chunkSize = config.getOptionalNumber(
+       "aiAssistant.storage.pgVector.chunkSize"
+     );
+     const amount = config.getOptionalNumber(
+       "aiAssistant.storage.pgVector.amount"
+     );
+     return new PgVectorStore(client, logger, amount, chunkSize);
+   }
+   connectEmbeddings(embeddings) {
+     if (this.embeddings) {
+       this.logger.warn("Embeddings already connected, overwriting.");
+     }
+     this.embeddings = embeddings;
+   }
+   table() {
+     return this.client(this.tableName);
+   }
+   /**
+    * Add documents to the vector store.
+    *
+    * @param {EmbeddingDocument[]} documents - The array of documents to be added.
+    * @throws {Error} When no embeddings are configured for the vector store.
+    * @returns {Promise<void>} Resolves when the documents have been added successfully.
+    */
+   async addDocuments(documents) {
+     if (documents.length === 0) {
+       return;
+     }
+     const texts = documents.map(({ content }) => content);
+     if (!this.embeddings) {
+       throw new Error("No Embeddings configured for the vector store.");
+     }
+     const vectors = await this.embeddings.embedDocuments(texts);
+     this.logger.info(
+       `Received ${vectors.length} vectors from embeddings creation.`
+     );
+     this.addVectors(vectors, documents);
+   }
+   /**
+    * Adds vectors to the database along with corresponding documents.
+    *
+    * @param {number[][]} vectors - The vectors to be added.
+    * @param {EmbeddingDocument[]} documents - The corresponding documents.
+    * @return {Promise<void>} - A promise that resolves when the vectors are added successfully.
+    * @throws {Error} - If there is an error inserting the vectors.
+    */
+   async addVectors(vectors, documents) {
+     try {
+       const rows = [];
+       for (let i = 0; i < vectors.length; i += 1) {
+         const embedding = vectors[i];
+         const embeddingString = `[${embedding.join(",")}]`;
+         const values = {
+           content: documents[i].content.replace(/\0/g, ""),
+           vector: embeddingString.replace(/\0/g, ""),
+           metadata: documents[i].metadata
+         };
+         rows.push(values);
+       }
+       await this.client.batchInsert(this.tableName, rows, this.chunkSize);
+     } catch (e) {
+       this.logger.error(e.message);
+       throw new Error(`Error inserting: ${e.message}`);
+     }
+   }
+   /**
+    * Deletes records from the database table by their ids.
+    *
+    * @param {string[]} ids - The array of ids of the records to be deleted.
+    * @returns {Promise<void>} - A promise that resolves when the deletion is complete.
+    */
+   async deleteById(ids) {
+     await this.table().delete().whereIn("id", ids);
+   }
+   /**
+    * Deletes rows from the table based on the specified filter.
+    *
+    * @param {EmbeddingDocumentMetadata} filter - The filter to apply for deletion.
+    * @returns {Promise} - A Promise that resolves when the deletion is complete.
+    */
+   async deleteByFilter(filter) {
+     const queryString = `
+       DELETE FROM ${this.tableName}
+       WHERE metadata::jsonb @> :filter
+     `;
+     return this.client.raw(queryString, { filter });
+   }
+   /**
+    * Deletes documents based on the provided deletion parameters.
+    * Either `ids` or `filter` must be specified.
+    *
+    * @param {Object} deletionParams - The deletion parameters.
+    * @param {Array<string>} [deletionParams.ids] - The document IDs to delete.
+    * @param {EmbeddingDocumentMetadata} [deletionParams.filter] - The filter to match documents to be deleted.
+    *
+    * @return {Promise<void>} - A Promise that resolves once the documents have been deleted.
+    */
+   async deleteDocuments(deletionParams) {
+     const { ids, filter } = deletionParams;
+     if (!(ids || filter)) {
+       throw new Error(
+         "You must specify either ids or a filter when deleting documents."
+       );
+     }
+     if (ids && filter) {
+       throw new Error(
+         "You cannot specify both ids and a filter when deleting documents."
+       );
+     }
+     if (ids) {
+       await this.deleteById(ids);
+     } else if (filter) {
+       await this.deleteByFilter(filter);
+     }
+   }
+   /**
+    * Finds the most similar documents to a given query vector, along with their similarity scores.
+    *
+    * @param {number[]} query - The query vector to compare against.
+    * @param {number} amount - The maximum number of results to return.
+    * @param {EmbeddingDocumentMetadata} [filter] - Optional filter to limit the search results.
+    * @returns {Promise<[EmbeddingDocument, number][]>} - An array of document similarity results, where each
+    *   result is a tuple containing the document and its similarity score.
+    */
+   async similaritySearchVectorWithScore(query, amount, filter) {
+     const embeddingString = `[${query.join(",")}]`;
+     const queryString = `
+       SELECT *, vector <=> :embeddingString as "_distance"
+       FROM ${this.tableName}
+       WHERE metadata::jsonb @> :filter
+       ORDER BY "_distance" ASC
+       LIMIT :amount
+     `;
+     const documents = (await this.client.raw(queryString, {
+       embeddingString,
+       filter: JSON.stringify(filter ?? {}),
+       amount
+     })).rows;
+     const results = [];
+     for (const doc of documents) {
+       if (doc._distance !== null && doc.content !== null) {
+         const document = {
+           content: doc.content,
+           metadata: doc.metadata
+         };
+         results.push([document, doc._distance]);
+       }
+     }
+     return results;
+   }
+   /**
+    * Performs a similarity search using the given query and filter.
+    *
+    * @param {string} query - The query to perform the similarity search on.
+    * @param {EmbeddingDocumentMetadata} filter - The filter to apply to the search results.
+    * @param {number} [amount=4] - The number of results to return.
+    * @return {Promise<EmbeddingDocument[]>} - A promise that resolves to an array of EmbeddingDocument objects representing the search results.
+    * @throws {Error} - Throws an error if there are no embeddings configured for the vector store.
+    */
+   async similaritySearch(query, filter, amount = this.amount) {
+     if (!this.embeddings) {
+       throw new Error("No Embeddings configured for the vector store.");
+     }
+     const results = await this.similaritySearchVectorWithScore(
+       await this.embeddings.embedQuery(query),
+       amount,
+       filter
+     );
+     return results.map((result) => result[0]);
+   }
+ }
+
+ exports.PgVectorStore = PgVectorStore;
+ //# sourceMappingURL=pg-vector-store.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"pg-vector-store.cjs.js","sources":["../../src/database/pg-vector-store.ts"],"sourcesContent":["import {\n DatabaseService,\n LoggerService,\n RootConfigService,\n} from '@backstage/backend-plugin-api';\nimport {\n VectorStore,\n EmbeddingDocument,\n EmbeddingDocumentMetadata,\n} from '@sweetoburrito/backstage-plugin-ai-assistant-node';\nimport { Embeddings } from '@langchain/core/embeddings';\nimport { Knex } from 'knex';\n\nexport type PgVectorStoreOptions = {\n database: DatabaseService;\n logger: LoggerService;\n config: RootConfigService;\n};\n\nexport class PgVectorStore implements VectorStore {\n private readonly tableName: string = 'embeddings';\n private embeddings?: Embeddings;\n\n /**\n * Creates an instance of PgVectorStore.\n * @param client - The Knex client to interact with the PostgreSQL database.\n * @param [amount=4] - The number of embeddings to store.\n * @param [chunkSize=500] - The size of each chunk of embeddings.\n */\n constructor(\n private readonly client: Knex,\n private readonly logger: LoggerService,\n private readonly amount: number = 4,\n private readonly chunkSize: number = 500,\n ) {}\n\n static async fromConfig({ config, database, logger }: PgVectorStoreOptions) {\n const client = await database.getClient();\n const chunkSize = config.getOptionalNumber(\n 'aiAssistant.storage.pgVector.chunkSize',\n );\n const amount = config.getOptionalNumber(\n 'aiAssistant.storage.pgVector.amount',\n );\n\n return new PgVectorStore(client, logger, amount, chunkSize);\n }\n\n connectEmbeddings(embeddings: Embeddings) {\n if (this.embeddings) {\n this.logger.warn('Embeddings already connected, overwriting.');\n }\n this.embeddings = embeddings;\n }\n\n table() {\n return this.client(this.tableName);\n }\n\n /**\n * Add documents to the vector store.\n *\n * @param {EmbeddingDocument[]} documents - The array of documents to be added.\n * @throws {Error} When no embeddings are configured for the vector store.\n * @returns {Promise<void>} Resolves when the documents have been added successfully.\n */\n async addDocuments(documents: EmbeddingDocument[]): Promise<void> {\n if (documents.length === 0) {\n return;\n }\n const texts = documents.map(({ content }) => content);\n if (!this.embeddings) {\n throw new Error('No Embeddings configured for the vector store.');\n }\n\n const vectors = await this.embeddings.embedDocuments(texts);\n this.logger.info(\n `Received ${vectors.length} vectors from embeddings creation.`,\n );\n this.addVectors(vectors, documents);\n }\n\n /**\n * Adds vectors to the database along with corresponding documents.\n *\n * @param {number[][]} vectors - The vectors to be added.\n * @param {EmbeddingDoc[]} documents - The corresponding documents.\n * @return {Promise<void>} - A promise that resolves when the vectors are added successfully.\n * @throws {Error} - If there is an error inserting the vectors.\n */\n private async addVectors(\n vectors: number[][],\n documents: EmbeddingDocument[],\n ): Promise<void> {\n try {\n const rows = [];\n for (let i = 0; i < vectors.length; i += 1) {\n const embedding = vectors[i];\n const embeddingString = `[${embedding.join(',')}]`;\n const values = {\n content: documents[i].content.replace(/\\0/g, ''),\n vector: embeddingString.replace(/\\0/g, ''),\n metadata: documents[i].metadata,\n };\n rows.push(values);\n }\n\n await this.client.batchInsert(this.tableName, rows, this.chunkSize);\n } catch (e) {\n this.logger.error((e as Error).message);\n throw new Error(`Error inserting: ${(e as 
Error).message}`);\n }\n }\n\n /**\n * Deletes records from the database table by their ids.\n *\n * @param {string[]} ids - The array of ids of the records to be deleted.\n * @returns {Promise<void>} - A promise that resolves when the deletion is complete.\n */\n private async deleteById(ids: string[]) {\n await this.table().delete().whereIn('id', ids);\n }\n\n /**\n * Deletes rows from the table based on the specified filter.\n *\n * @param {EmbeddingDocMetadata} filter - The filter to apply for deletion.\n * @returns {Promise} - A Promise that resolves when the deletion is complete.\n */\n private async deleteByFilter(filter: EmbeddingDocumentMetadata) {\n const queryString = `\n DELETE FROM ${this.tableName}\n WHERE metadata::jsonb @> :filter\n `;\n return this.client.raw(queryString, { filter });\n }\n\n /**\n * Deletes documents based on the provided deletion parameters.\n * Either `ids` or `filter` must be specified.\n *\n * @param {Object} deletionParams - The deletion parameters.\n * @param {Array<string>} [deletionParams.ids] - The document IDs to delete.\n * @param {EmbeddingDocMetadata} [deletionParams.filter] - The filter to match documents to be deleted.\n *\n * @return {Promise<void>} - A Promise that resolves once the documents have been deleted.\n */\n async deleteDocuments(deletionParams: {\n ids?: string[];\n filter?: EmbeddingDocumentMetadata;\n }): Promise<void> {\n const { ids, filter } = deletionParams;\n\n if (!(ids || filter)) {\n throw new Error(\n 'You must specify either ids or a filter when deleting documents.',\n );\n }\n\n if (ids && filter) {\n throw new Error(\n 'You cannot specify both ids and a filter when deleting documents.',\n );\n }\n\n if (ids) {\n await this.deleteById(ids);\n } else if (filter) {\n await this.deleteByFilter(filter);\n }\n }\n\n /**\n * Finds the most similar documents to a given query vector, along with their similarity scores.\n *\n * @param {number[]} query - The query vector to compare against.\n * @param {number} amount - The maximum number of results to return.\n * @param {EmbeddingDocumentMetadata} [filter] - Optional filter to limit the search results.\n * @returns {Promise<[EmbeddingDocument, number][]>} - An array of document similarity results, where each\n * result is a tuple containing the document and its similarity score.\n */\n private async similaritySearchVectorWithScore(\n query: number[],\n amount: number,\n filter?: EmbeddingDocumentMetadata,\n ): Promise<[EmbeddingDocument, number][]> {\n const embeddingString = `[${query.join(',')}]`;\n const queryString = `\n SELECT *, vector <=> :embeddingString as \"_distance\"\n FROM ${this.tableName}\n WHERE metadata::jsonb @> :filter\n ORDER BY \"_distance\" ASC\n LIMIT :amount\n `;\n\n const documents = (\n await this.client.raw(queryString, {\n embeddingString,\n filter: JSON.stringify(filter ?? 
{}),\n amount,\n })\n ).rows;\n\n const results = [] as [EmbeddingDocument, number][];\n for (const doc of documents) {\n // eslint-ignore-next-line\n if (doc._distance !== null && doc.content !== null) {\n const document = {\n content: doc.content,\n metadata: doc.metadata,\n };\n results.push([document, doc._distance]);\n }\n }\n return results;\n }\n\n /**\n * Performs a similarity search using the given query and filter.\n *\n * @param {string} query - The query to perform the similarity search on.\n * @param {EmbeddingDocMetadata} filter - The filter to apply to the search results.\n * @param {number} [amount=4] - The number of results to return.\n * @return {Promise<EmbeddingDoc[]>} - A promise that resolves to an array of RoadieEmbeddingDoc objects representing the search results.\n * @throws {Error} - Throws an error if there are no embeddings configured for the vector store.\n */\n async similaritySearch(\n query: string,\n filter: EmbeddingDocumentMetadata,\n amount: number = this.amount,\n ): Promise<EmbeddingDocument[]> {\n if (!this.embeddings) {\n throw new Error('No Embeddings configured for the vector store.');\n }\n const results = await this.similaritySearchVectorWithScore(\n await this.embeddings.embedQuery(query),\n amount,\n filter,\n );\n\n return results.map(result => result[0]);\n }\n}\n"],"names":[],"mappings":";;AAmBO,MAAM,aAAA,CAAqC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUhD,YACmB,MAAA,EACA,MAAA,EACA,MAAA,GAAiB,CAAA,EACjB,YAAoB,GAAA,EACrC;AAJiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AACA,IAAA,IAAA,CAAA,SAAA,GAAA,SAAA;AAAA,EAChB;AAAA,EAdc,SAAA,GAAoB,YAAA;AAAA,EAC7B,UAAA;AAAA,EAeR,aAAa,UAAA,CAAW,EAAE,MAAA,EAAQ,QAAA,EAAU,QAAO,EAAyB;AAC1E,IAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,SAAA,EAAU;AACxC,IAAA,MAAM,YAAY,MAAA,CAAO,iBAAA;AAAA,MACvB;AAAA,KACF;AACA,IAAA,MAAM,SAAS,MAAA,CAAO,iBAAA;AAAA,MACpB;AAAA,KACF;AAEA,IAAA,OAAO,IAAI,aAAA,CAAc,MAAA,EAAQ,MAAA,EAAQ,QAAQ,SAAS,CAAA;AAAA,EAC5D;AAAA,EAEA,kBAAkB,UAAA,EAAwB;AACxC,IAAA,IAAI,KAAK,UAAA,EAAY;AACnB,MAAA,IAAA,CAAK,MAAA,CAAO,KAAK,4CAA4C,CAAA;AAAA,IAC/D;AACA,IAAA,IAAA,CAAK,UAAA,GAAa,UAAA;AAAA,EACpB;AAAA,EAEA,KAAA,GAAQ;AACN,IAAA,OAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,SAAS,CAAA;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aAAa,SAAA,EAA+C;AAChE,IAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,MAAA;AAAA,IACF;AACA,IAAA,MAAM,QAAQ,SAAA,CAAU,GAAA,CAAI,CAAC,EAAE,OAAA,OAAc,OAAO,CAAA;AACpD,IAAA,IAAI,CAAC,KAAK,UAAA,EAAY;AACpB,MAAA,MAAM,IAAI,MAAM,gDAAgD,CAAA;AAAA,IAClE;AAEA,IAAA,MAAM,OAAA,GAAU,MAAM,IAAA,CAAK,UAAA,CAAW,eAAe,KAAK,CAAA;AAC1D,IAAA,IAAA,CAAK,MAAA,CAAO,IAAA;AAAA,MACV,CAAA,SAAA,EAAY,QAAQ,MAAM,CAAA,kCAAA;AAAA,KAC5B;AACA,IAAA,IAAA,CAAK,UAAA,CAAW,SAAS,SAAS,CAAA;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,UAAA,CACZ,OAAA,EACA,SAAA,EACe;AACf,IAAA,IAAI;AACF,MAAA,MAAM,OAAO,EAAC;AACd,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,OAAA,CAAQ,MAAA,EAAQ,KAAK,CAAA,EAAG;AAC1C,QAAA,MAAM,SAAA,GAAY,QAAQ,CAAC,CAAA;AAC3B,QAAA,MAAM,eAAA,GAAkB,CAAA,CAAA,EAAI,SAAA,CAAU,IAAA,CAAK,GAAG,CAAC,CAAA,CAAA,CAAA;AAC/C,QAAA,MAAM,MAAA,GAAS;AAAA,UACb,SAAS,SAAA,CAAU,CAAC,EAAE,OAAA,CAAQ,OAAA,CAAQ,OAAO,EAAE,CAAA;AAAA,UAC/C,MAAA,EAAQ,eAAA,CAAgB,OAAA,CAAQ,KAAA,EAAO,EAAE,CAAA;AAAA,UACzC,QAAA,EAAU,SAAA,CAAU,CAAC,CAAA,CAAE;AAAA,SACzB;AACA,QAAA,IAAA,CAAK,KAAK,MAAM,CAAA;AAAA,MAClB;AAEA,MAAA,MAAM,KAAK,MAAA,CAAO,WAAA,CAAY,KAAK,SAAA,EAAW,IAAA,EAAM,KAAK,SAAS,CAAA;AAAA,IACpE,SAAS,CAAA,EAAG;AACV,MAAA,IAAA,CAAK,MAAA,CAAO,KAAA,CAAO,CAAA,CAAY,OAAO,CAAA;AACtC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,iBAAA,EAAqB,CAAA,CAAY,OAAO,
CAAA,CAAE,CAAA;AAAA,IAC5D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,WAAW,GAAA,EAAe;AACtC,IAAA,MAAM,KAAK,KAAA,EAAM,CAAE,QAAO,CAAE,OAAA,CAAQ,MAAM,GAAG,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,eAAe,MAAA,EAAmC;AAC9D,IAAA,MAAM,WAAA,GAAc;AAAA,kBAAA,EACJ,KAAK,SAAS;AAAA;AAAA,IAAA,CAAA;AAG9B,IAAA,OAAO,KAAK,MAAA,CAAO,GAAA,CAAI,WAAA,EAAa,EAAE,QAAQ,CAAA;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,gBAAgB,cAAA,EAGJ;AAChB,IAAA,MAAM,EAAE,GAAA,EAAK,MAAA,EAAO,GAAI,cAAA;AAExB,IAAA,IAAI,EAAE,OAAO,MAAA,CAAA,EAAS;AACpB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAI,GAAA,EAAK;AACP,MAAA,MAAM,IAAA,CAAK,WAAW,GAAG,CAAA;AAAA,IAC3B,WAAW,MAAA,EAAQ;AACjB,MAAA,MAAM,IAAA,CAAK,eAAe,MAAM,CAAA;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,+BAAA,CACZ,KAAA,EACA,MAAA,EACA,MAAA,EACwC;AACxC,IAAA,MAAM,eAAA,GAAkB,CAAA,CAAA,EAAI,KAAA,CAAM,IAAA,CAAK,GAAG,CAAC,CAAA,CAAA,CAAA;AAC3C,IAAA,MAAM,WAAA,GAAc;AAAA;AAAA,WAAA,EAEX,KAAK,SAAS;AAAA;AAAA;AAAA;AAAA,IAAA,CAAA;AAMvB,IAAA,MAAM,SAAA,GAAA,CACJ,MAAM,IAAA,CAAK,MAAA,CAAO,IAAI,WAAA,EAAa;AAAA,MACjC,eAAA;AAAA,MACA,MAAA,EAAQ,IAAA,CAAK,SAAA,CAAU,MAAA,IAAU,EAAE,CAAA;AAAA,MACnC;AAAA,KACD,CAAA,EACD,IAAA;AAEF,IAAA,MAAM,UAAU,EAAC;AACjB,IAAA,KAAA,MAAW,OAAO,SAAA,EAAW;AAE3B,MAAA,IAAI,GAAA,CAAI,SAAA,KAAc,IAAA,IAAQ,GAAA,CAAI,YAAY,IAAA,EAAM;AAClD,QAAA,MAAM,QAAA,GAAW;AAAA,UACf,SAAS,GAAA,CAAI,OAAA;AAAA,UACb,UAAU,GAAA,CAAI;AAAA,SAChB;AACA,QAAA,OAAA,CAAQ,IAAA,CAAK,CAAC,QAAA,EAAU,GAAA,CAAI,SAAS,CAAC,CAAA;AAAA,MACxC;AAAA,IACF;AACA,IAAA,OAAO,OAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,gBAAA,CACJ,KAAA,EACA,MAAA,EACA,MAAA,GAAiB,KAAK,MAAA,EACQ;AAC9B,IAAA,IAAI,CAAC,KAAK,UAAA,EAAY;AACpB,MAAA,MAAM,IAAI,MAAM,gDAAgD,CAAA;AAAA,IAClE;AACA,IAAA,MAAM,OAAA,GAAU,MAAM,IAAA,CAAK,+BAAA;AAAA,MACzB,MAAM,IAAA,CAAK,UAAA,CAAW,UAAA,CAAW,KAAK,CAAA;AAAA,MACtC,MAAA;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,CAAA,MAAA,KAAU,MAAA,CAAO,CAAC,CAAC,CAAA;AAAA,EACxC;AACF;;;;"}
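
Two things worth noting about the store above: the `<=>` operator in the similarity query is pgvector's distance operator, so the underlying PostgreSQL database needs the pgvector extension, and no search can run until `connectEmbeddings` has been called. A usage sketch, assuming the source-level import path and with the document content and filter values invented for illustration:

```ts
import {
  DatabaseService,
  LoggerService,
  RootConfigService,
} from '@backstage/backend-plugin-api';
import { Embeddings } from '@langchain/core/embeddings';
import { PgVectorStore } from './database/pg-vector-store';

// Wiring mirrors plugin.cjs.js: build the store from core services, attach embeddings, then query.
async function exampleSearch(
  config: RootConfigService,
  database: DatabaseService,
  logger: LoggerService,
  embeddings: Embeddings,
) {
  const vectorStore = await PgVectorStore.fromConfig({ config, database, logger });
  vectorStore.connectEmbeddings(embeddings);

  // Embed and store a document...
  await vectorStore.addDocuments([
    { content: 'The deploy pipeline runs on every merge to main.', metadata: { source: 'wiki' } },
  ]);

  // ...and later fetch the closest matches for a question (default: 4 results).
  return vectorStore.similaritySearch('How do deployments work?', { source: 'wiki' });
}
```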
@@ -0,0 +1,10 @@
+ 'use strict';
+
+ Object.defineProperty(exports, '__esModule', { value: true });
+
+ var plugin = require('./plugin.cjs.js');
+
+
+
+ exports.default = plugin.aiAssistantPlugin;
+ //# sourceMappingURL=index.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
@@ -0,0 +1,10 @@
+ import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
+
+ /**
+  * aiAssistantPlugin backend plugin
+  *
+  * @public
+  */
+ declare const aiAssistantPlugin: _backstage_backend_plugin_api.BackendFeature;
+
+ export { aiAssistantPlugin as default };
@@ -0,0 +1,82 @@
+ 'use strict';
+
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+ var index = require('./services/router/index.cjs.js');
+ var backstagePluginAiAssistantNode = require('@sweetoburrito/backstage-plugin-ai-assistant-node');
+ var ingestor = require('./services/ingestor.cjs.js');
+ var chat = require('./services/chat.cjs.js');
+ var prompt = require('./services/prompt.cjs.js');
+ var migrations = require('./database/migrations.cjs.js');
+ var pgVectorStore = require('./database/pg-vector-store.cjs.js');
+
+ const aiAssistantPlugin = backendPluginApi.createBackendPlugin({
+   pluginId: "ai-assistant",
+   register(env) {
+     const ingestors = [];
+     const models = [];
+     let embeddingsProvider;
+     env.registerExtensionPoint(backstagePluginAiAssistantNode.dataIngestorExtensionPoint, {
+       registerIngestor: (ingestor) => {
+         const existingIngestor = ingestors.find((i) => i.id === ingestor.id);
+         if (existingIngestor) {
+           throw new Error(
+             `Ingestor with id ${ingestor.id} is already registered.`
+           );
+         }
+         ingestors.push(ingestor);
+       }
+     });
+     env.registerExtensionPoint(backstagePluginAiAssistantNode.embeddingsProviderExtensionPoint, {
+       register: (provider) => {
+         embeddingsProvider = provider;
+       }
+     });
+     env.registerExtensionPoint(backstagePluginAiAssistantNode.modelProviderExtensionPoint, {
+       register: (model) => {
+         const existingModel = models.find((m) => m.id === model.id);
+         if (existingModel) {
+           throw new Error(`Model with id ${model.id} is already registered.`);
+         }
+         models.push(model);
+       }
+     });
+     env.registerInit({
+       deps: {
+         httpRouter: backendPluginApi.coreServices.httpRouter,
+         database: backendPluginApi.coreServices.database,
+         logger: backendPluginApi.coreServices.logger,
+         config: backendPluginApi.coreServices.rootConfig,
+         scheduler: backendPluginApi.coreServices.scheduler,
+         httpAuth: backendPluginApi.coreServices.httpAuth,
+         userInfo: backendPluginApi.coreServices.userInfo
+       },
+       async init(options) {
+         const { httpRouter, database } = options;
+         const client = await database.getClient();
+         await migrations.applyDatabaseMigrations(client);
+         const vectorStore = await pgVectorStore.PgVectorStore.fromConfig(options);
+         if (!embeddingsProvider) {
+           throw new Error("No Embeddings Provider was registered.");
+         }
+         vectorStore.connectEmbeddings(await embeddingsProvider.getEmbeddings());
+         const dataIngestionPipeline = ingestor.createDataIngestionPipeline({
+           ...options,
+           vectorStore,
+           ingestors
+         });
+         const promptBuilder = prompt.createPromptBuilder(options);
+         const chat$1 = await chat.createChatService({
+           ...options,
+           models,
+           vectorStore,
+           promptBuilder
+         });
+         httpRouter.use(await index.createRouter({ ...options, chat: chat$1 }));
+         dataIngestionPipeline.start();
+       }
+     });
+   }
+ });
+
+ exports.aiAssistantPlugin = aiAssistantPlugin;
+ //# sourceMappingURL=plugin.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"plugin.cjs.js","sources":["../src/plugin.ts"],"sourcesContent":["import {\n coreServices,\n createBackendPlugin,\n} from '@backstage/backend-plugin-api';\nimport { createRouter } from './services/router';\nimport {\n dataIngestorExtensionPoint,\n EmbeddingsProvider,\n embeddingsProviderExtensionPoint,\n Ingestor,\n Model,\n modelProviderExtensionPoint,\n} from '@sweetoburrito/backstage-plugin-ai-assistant-node';\nimport { createDataIngestionPipeline } from './services/ingestor';\nimport { createChatService } from './services/chat';\nimport { createPromptBuilder } from './services/prompt';\nimport { applyDatabaseMigrations } from './database/migrations';\nimport { PgVectorStore } from './database';\n\n/**\n * aiAssistantPlugin backend plugin\n *\n * @public\n */\nexport const aiAssistantPlugin = createBackendPlugin({\n pluginId: 'ai-assistant',\n register(env) {\n const ingestors: Ingestor[] = [];\n const models: Model[] = [];\n\n let embeddingsProvider: EmbeddingsProvider;\n\n env.registerExtensionPoint(dataIngestorExtensionPoint, {\n registerIngestor: ingestor => {\n const existingIngestor = ingestors.find(i => i.id === ingestor.id);\n if (existingIngestor) {\n throw new Error(\n `Ingestor with id ${ingestor.id} is already registered.`,\n );\n }\n ingestors.push(ingestor);\n },\n });\n\n env.registerExtensionPoint(embeddingsProviderExtensionPoint, {\n register: provider => {\n embeddingsProvider = provider;\n },\n });\n\n env.registerExtensionPoint(modelProviderExtensionPoint, {\n register: model => {\n const existingModel = models.find(m => m.id === model.id);\n if (existingModel) {\n throw new Error(`Model with id ${model.id} is already registered.`);\n }\n models.push(model);\n },\n });\n\n env.registerInit({\n deps: {\n httpRouter: coreServices.httpRouter,\n database: coreServices.database,\n logger: coreServices.logger,\n config: coreServices.rootConfig,\n scheduler: coreServices.scheduler,\n httpAuth: coreServices.httpAuth,\n userInfo: coreServices.userInfo,\n },\n\n async init(options) {\n const { httpRouter, database } = options;\n const client = await database.getClient();\n\n await applyDatabaseMigrations(client);\n\n const vectorStore = await PgVectorStore.fromConfig(options);\n\n if (!embeddingsProvider) {\n throw new Error('No Embeddings Provider was registered.');\n }\n\n vectorStore.connectEmbeddings(await embeddingsProvider.getEmbeddings());\n\n const dataIngestionPipeline = createDataIngestionPipeline({\n ...options,\n vectorStore,\n ingestors,\n });\n\n const promptBuilder = createPromptBuilder(options);\n\n const chat = await createChatService({\n ...options,\n models,\n vectorStore,\n promptBuilder,\n });\n\n httpRouter.use(await createRouter({ ...options, chat }));\n dataIngestionPipeline.start();\n },\n });\n 
},\n});\n"],"names":["createBackendPlugin","dataIngestorExtensionPoint","embeddingsProviderExtensionPoint","modelProviderExtensionPoint","coreServices","applyDatabaseMigrations","PgVectorStore","createDataIngestionPipeline","createPromptBuilder","chat","createChatService","createRouter"],"mappings":";;;;;;;;;;;AAwBO,MAAM,oBAAoBA,oCAAA,CAAoB;AAAA,EACnD,QAAA,EAAU,cAAA;AAAA,EACV,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,YAAwB,EAAC;AAC/B,IAAA,MAAM,SAAkB,EAAC;AAEzB,IAAA,IAAI,kBAAA;AAEJ,IAAA,GAAA,CAAI,uBAAuBC,yDAAA,EAA4B;AAAA,MACrD,kBAAkB,CAAA,QAAA,KAAY;AAC5B,QAAA,MAAM,mBAAmB,SAAA,CAAU,IAAA,CAAK,OAAK,CAAA,CAAE,EAAA,KAAO,SAAS,EAAE,CAAA;AACjE,QAAA,IAAI,gBAAA,EAAkB;AACpB,UAAA,MAAM,IAAI,KAAA;AAAA,YACR,CAAA,iBAAA,EAAoB,SAAS,EAAE,CAAA,uBAAA;AAAA,WACjC;AAAA,QACF;AACA,QAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AAAA,MACzB;AAAA,KACD,CAAA;AAED,IAAA,GAAA,CAAI,uBAAuBC,+DAAA,EAAkC;AAAA,MAC3D,UAAU,CAAA,QAAA,KAAY;AACpB,QAAA,kBAAA,GAAqB,QAAA;AAAA,MACvB;AAAA,KACD,CAAA;AAED,IAAA,GAAA,CAAI,uBAAuBC,0DAAA,EAA6B;AAAA,MACtD,UAAU,CAAA,KAAA,KAAS;AACjB,QAAA,MAAM,gBAAgB,MAAA,CAAO,IAAA,CAAK,OAAK,CAAA,CAAE,EAAA,KAAO,MAAM,EAAE,CAAA;AACxD,QAAA,IAAI,aAAA,EAAe;AACjB,UAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,KAAA,CAAM,EAAE,CAAA,uBAAA,CAAyB,CAAA;AAAA,QACpE;AACA,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,KACD,CAAA;AAED,IAAA,GAAA,CAAI,YAAA,CAAa;AAAA,MACf,IAAA,EAAM;AAAA,QACJ,YAAYC,6BAAA,CAAa,UAAA;AAAA,QACzB,UAAUA,6BAAA,CAAa,QAAA;AAAA,QACvB,QAAQA,6BAAA,CAAa,MAAA;AAAA,QACrB,QAAQA,6BAAA,CAAa,UAAA;AAAA,QACrB,WAAWA,6BAAA,CAAa,SAAA;AAAA,QACxB,UAAUA,6BAAA,CAAa,QAAA;AAAA,QACvB,UAAUA,6BAAA,CAAa;AAAA,OACzB;AAAA,MAEA,MAAM,KAAK,OAAA,EAAS;AAClB,QAAA,MAAM,EAAE,UAAA,EAAY,QAAA,EAAS,GAAI,OAAA;AACjC,QAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,SAAA,EAAU;AAExC,QAAA,MAAMC,mCAAwB,MAAM,CAAA;AAEpC,QAAA,MAAM,WAAA,GAAc,MAAMC,2BAAA,CAAc,UAAA,CAAW,OAAO,CAAA;AAE1D,QAAA,IAAI,CAAC,kBAAA,EAAoB;AACvB,UAAA,MAAM,IAAI,MAAM,wCAAwC,CAAA;AAAA,QAC1D;AAEA,QAAA,WAAA,CAAY,iBAAA,CAAkB,MAAM,kBAAA,CAAmB,aAAA,EAAe,CAAA;AAEtE,QAAA,MAAM,wBAAwBC,oCAAA,CAA4B;AAAA,UACxD,GAAG,OAAA;AAAA,UACH,WAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,MAAM,aAAA,GAAgBC,2BAAoB,OAAO,CAAA;AAEjD,QAAA,MAAMC,MAAA,GAAO,MAAMC,sBAAA,CAAkB;AAAA,UACnC,GAAG,OAAA;AAAA,UACH,MAAA;AAAA,UACA,WAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,UAAA,CAAW,GAAA,CAAI,MAAMC,kBAAA,CAAa,EAAE,GAAG,OAAA,QAASF,MAAA,EAAM,CAAC,CAAA;AACvD,QAAA,qBAAA,CAAsB,KAAA,EAAM;AAAA,MAC9B;AAAA,KACD,CAAA;AAAA,EACH;AACF,CAAC;;;;"}
@@ -0,0 +1,76 @@
+ 'use strict';
+
+ var uuid = require('uuid');
+ var chatStore = require('../database/chat-store.cjs.js');
+
+ const createChatService = async ({
+   models,
+   logger,
+   vectorStore,
+   promptBuilder,
+   database
+ }) => {
+   logger.info(`Available models: ${models.map((m) => m.id).join(", ")}`);
+   const chatStore$1 = await chatStore.ChatStore.fromConfig({ database });
+   const getChatModelById = (id) => {
+     return models.find((model) => model.id === id)?.chatModel;
+   };
+   const prompt = async ({
+     conversationId,
+     messages,
+     modelId,
+     stream,
+     userEntityRef
+   }) => {
+     const model = getChatModelById(modelId);
+     if (!model) {
+       throw new Error(`Model with id ${modelId} not found`);
+     }
+     chatStore$1.addChatMessage(messages, userEntityRef, conversationId);
+     const context = await vectorStore.similaritySearch(
+       messages.filter((m) => m.role === "user").map((m) => m.content).join("\n")
+     );
+     const recentConversationMessages = await chatStore$1.getChatMessages(
+       conversationId,
+       userEntityRef,
+       10
+     );
+     const promptMessages = promptBuilder.buildPrompt(
+       [...recentConversationMessages, ...messages],
+       context
+     );
+     const responseId = uuid.v4();
+     if (stream) {
+       throw new Error("Not Implemented");
+     }
+     const response = await model.invoke(promptMessages);
+     const aiMessages = [
+       {
+         id: responseId,
+         role: "assistant",
+         content: response.text
+       }
+     ];
+     chatStore$1.addChatMessage(aiMessages, userEntityRef, conversationId);
+     return aiMessages;
+   };
+   const getAvailableModels = async () => {
+     return models.map((x) => x.id);
+   };
+   const getConversation = async (options) => {
+     const { conversationId, userEntityRef } = options;
+     const conversation = await chatStore$1.getChatMessages(
+       conversationId,
+       userEntityRef
+     );
+     return conversation;
+   };
+   return {
+     prompt,
+     getAvailableModels,
+     getConversation
+   };
+ };
+
+ exports.createChatService = createChatService;
+ //# sourceMappingURL=chat.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"chat.cjs.js","sources":["../../src/services/chat.ts"],"sourcesContent":["import {\n Message,\n Model,\n VectorStore,\n} from '@sweetoburrito/backstage-plugin-ai-assistant-node';\nimport {\n LoggerService,\n RootConfigService,\n DatabaseService,\n} from '@backstage/backend-plugin-api';\nimport { PromptBuilder } from './prompt';\nimport { v4 as uuid } from 'uuid';\nimport { ChatStore } from '../database/chat-store';\n\nexport type ChatServiceOptions = {\n models: Model[];\n logger: LoggerService;\n vectorStore: VectorStore;\n config: RootConfigService;\n promptBuilder: PromptBuilder;\n database: DatabaseService;\n};\n\ntype PromptOptions = {\n modelId: string;\n messages: Message[];\n conversationId: string;\n stream: boolean;\n userEntityRef: string;\n};\n\ntype GetConversationOptions = {\n conversationId: string;\n userEntityRef: string;\n};\n\nexport type ChatService = {\n prompt: (options: PromptOptions) => Promise<Required<Message>[]>;\n getAvailableModels: () => Promise<string[]>;\n getConversation: (\n options: GetConversationOptions,\n ) => Promise<Required<Message>[]>;\n};\n\nexport const createChatService = async ({\n models,\n logger,\n vectorStore,\n promptBuilder,\n database,\n}: ChatServiceOptions): Promise<ChatService> => {\n logger.info(`Available models: ${models.map(m => m.id).join(', ')}`);\n\n const chatStore = await ChatStore.fromConfig({ database });\n\n const getChatModelById = (id: string) => {\n return models.find(model => model.id === id)?.chatModel;\n };\n\n const prompt: ChatService['prompt'] = async ({\n conversationId,\n messages,\n modelId,\n stream,\n userEntityRef,\n }: PromptOptions) => {\n const model = getChatModelById(modelId);\n\n if (!model) {\n throw new Error(`Model with id ${modelId} not found`);\n }\n\n chatStore.addChatMessage(messages, userEntityRef, conversationId);\n\n const context = await vectorStore.similaritySearch(\n messages\n .filter(m => m.role === 'user')\n .map(m => m.content)\n .join('\\n'),\n );\n\n const recentConversationMessages = await chatStore.getChatMessages(\n conversationId,\n userEntityRef,\n 10,\n );\n\n const promptMessages = promptBuilder.buildPrompt(\n [...recentConversationMessages, ...messages],\n context,\n );\n\n const responseId: string = uuid();\n\n if (stream) {\n // Handle streaming response\n throw new Error('Not Implemented');\n }\n\n const response = await model.invoke(promptMessages);\n\n const aiMessages: Required<Message>[] = [\n {\n id: responseId,\n role: 'assistant',\n content: response.text,\n },\n ];\n\n chatStore.addChatMessage(aiMessages, userEntityRef, conversationId);\n\n return aiMessages;\n };\n\n const getAvailableModels: ChatService['getAvailableModels'] = async () => {\n return models.map(x => x.id);\n };\n\n const getConversation: ChatService['getConversation'] = async (\n options: GetConversationOptions,\n ) => {\n const { conversationId, userEntityRef } = options;\n\n const conversation = await chatStore.getChatMessages(\n conversationId,\n userEntityRef,\n );\n\n return conversation;\n };\n\n return {\n prompt,\n getAvailableModels,\n getConversation,\n 
};\n};\n"],"names":["chatStore","ChatStore","uuid"],"mappings":";;;;;AA4CO,MAAM,oBAAoB,OAAO;AAAA,EACtC,MAAA;AAAA,EACA,MAAA;AAAA,EACA,WAAA;AAAA,EACA,aAAA;AAAA,EACA;AACF,CAAA,KAAgD;AAC9C,EAAA,MAAA,CAAO,IAAA,CAAK,CAAA,kBAAA,EAAqB,MAAA,CAAO,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,EAAE,CAAA,CAAE,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAEnE,EAAA,MAAMA,cAAY,MAAMC,mBAAA,CAAU,UAAA,CAAW,EAAE,UAAU,CAAA;AAEzD,EAAA,MAAM,gBAAA,GAAmB,CAAC,EAAA,KAAe;AACvC,IAAA,OAAO,OAAO,IAAA,CAAK,CAAA,KAAA,KAAS,KAAA,CAAM,EAAA,KAAO,EAAE,CAAA,EAAG,SAAA;AAAA,EAChD,CAAA;AAEA,EAAA,MAAM,SAAgC,OAAO;AAAA,IAC3C,cAAA;AAAA,IACA,QAAA;AAAA,IACA,OAAA;AAAA,IACA,MAAA;AAAA,IACA;AAAA,GACF,KAAqB;AACnB,IAAA,MAAM,KAAA,GAAQ,iBAAiB,OAAO,CAAA;AAEtC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,cAAA,EAAiB,OAAO,CAAA,UAAA,CAAY,CAAA;AAAA,IACtD;AAEA,IAAAD,WAAA,CAAU,cAAA,CAAe,QAAA,EAAU,aAAA,EAAe,cAAc,CAAA;AAEhE,IAAA,MAAM,OAAA,GAAU,MAAM,WAAA,CAAY,gBAAA;AAAA,MAChC,QAAA,CACG,MAAA,CAAO,CAAA,CAAA,KAAK,CAAA,CAAE,IAAA,KAAS,MAAM,CAAA,CAC7B,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,OAAO,CAAA,CAClB,KAAK,IAAI;AAAA,KACd;AAEA,IAAA,MAAM,0BAAA,GAA6B,MAAMA,WAAA,CAAU,eAAA;AAAA,MACjD,cAAA;AAAA,MACA,aAAA;AAAA,MACA;AAAA,KACF;AAEA,IAAA,MAAM,iBAAiB,aAAA,CAAc,WAAA;AAAA,MACnC,CAAC,GAAG,0BAAA,EAA4B,GAAG,QAAQ,CAAA;AAAA,MAC3C;AAAA,KACF;AAEA,IAAA,MAAM,aAAqBE,OAAA,EAAK;AAEhC,IAAA,IAAI,MAAA,EAAQ;AAEV,MAAA,MAAM,IAAI,MAAM,iBAAiB,CAAA;AAAA,IACnC;AAEA,IAAA,MAAM,QAAA,GAAW,MAAM,KAAA,CAAM,MAAA,CAAO,cAAc,CAAA;AAElD,IAAA,MAAM,UAAA,GAAkC;AAAA,MACtC;AAAA,QACE,EAAA,EAAI,UAAA;AAAA,QACJ,IAAA,EAAM,WAAA;AAAA,QACN,SAAS,QAAA,CAAS;AAAA;AACpB,KACF;AAEA,IAAAF,WAAA,CAAU,cAAA,CAAe,UAAA,EAAY,aAAA,EAAe,cAAc,CAAA;AAElE,IAAA,OAAO,UAAA;AAAA,EACT,CAAA;AAEA,EAAA,MAAM,qBAAwD,YAAY;AACxE,IAAA,OAAO,MAAA,CAAO,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,EAAE,CAAA;AAAA,EAC7B,CAAA;AAEA,EAAA,MAAM,eAAA,GAAkD,OACtD,OAAA,KACG;AACH,IAAA,MAAM,EAAE,cAAA,EAAgB,aAAA,EAAc,GAAI,OAAA;AAE1C,IAAA,MAAM,YAAA,GAAe,MAAMA,WAAA,CAAU,eAAA;AAAA,MACnC,cAAA;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAO,YAAA;AAAA,EACT,CAAA;AAEA,EAAA,OAAO;AAAA,IACL,MAAA;AAAA,IACA,kBAAA;AAAA,IACA;AAAA,GACF;AACF;;;;"}
@@ -0,0 +1,84 @@
+ 'use strict';
+
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+ var textsplitters = require('@langchain/textsplitters');
+
+ const DEFAULT_DATA_INGESTION_SCHEDULE = {
+   frequency: {
+     hours: 24
+   },
+   timeout: {
+     hours: 3
+   }
+ };
+ const createDataIngestionPipeline = ({
+   config,
+   logger,
+   scheduler,
+   ingestors,
+   vectorStore
+ }) => {
+   const schedule = config.has("aiAssistant.ingestion.schedule") ? backendPluginApi.readSchedulerServiceTaskScheduleDefinitionFromConfig(
+     config.getConfig("aiAssistant.ingestion.schedule")
+   ) : DEFAULT_DATA_INGESTION_SCHEDULE;
+   const taskRunner = scheduler.createScheduledTaskRunner(schedule);
+   const taskId = `ai-assistant.data-ingestion:start`;
+   const dataIngestion = async () => {
+     logger.info("Starting data ingestion...");
+     if (ingestors.length === 0) {
+       logger.warn("No ingestors available for data ingestion.");
+       return;
+     }
+     logger.info(`Ingestors available: ${ingestors.map((i) => i.id).join(", ")}`);
+     for await (const ingestor of ingestors) {
+       logger.info(`Running ingestor: ${ingestor.id}`);
+       await vectorStore.deleteDocuments({ filter: { source: ingestor.id } });
+       const saveDocumentsBatch = async (documents2) => {
+         logger.info(
+           `Ingested documents for ${ingestor.id}: ${documents2.length}`
+         );
+         const splitter = new textsplitters.RecursiveCharacterTextSplitter({
+           chunkSize: 500,
+           // TODO: Make chunk size configurable
+           chunkOverlap: 50
+           // TODO: Make chunk overlap configurable
+         });
+         const docs = await Promise.all(
+           documents2.map(async (document) => {
+             const chunks = await splitter.splitText(document.content);
+             const chunkDocs = chunks.flatMap(
+               (chunk, i) => ({
+                 metadata: { ...document.metadata, chunk: String(i) },
+                 content: chunk
+               })
+             );
+             return chunkDocs;
+           })
+         );
+         logger.info(`Adding documents to vector store...`);
+         await vectorStore.addDocuments(docs.flat());
+         logger.info(`Added documents to vector store for ${ingestor.id}`);
+       };
+       const documents = await ingestor.ingest({
+         saveDocumentsBatch
+       });
+       if (documents) {
+         saveDocumentsBatch(documents);
+       }
+       logger.info(`Finished processing ingestor: ${ingestor.id}`);
+     }
+     logger.info("Data ingestion completed.");
+   };
+   const start = async () => {
+     taskRunner.run({
+       id: taskId,
+       fn: dataIngestion
+     });
+   };
+   return {
+     start
+   };
+ };
+
+ exports.createDataIngestionPipeline = createDataIngestionPipeline;
+ //# sourceMappingURL=ingestor.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"ingestor.cjs.js","sources":["../../src/services/ingestor.ts"],"sourcesContent":["import {\n DataIngestionPipeline,\n DataIngestionPipelineOptions,\n EmbeddingDocument,\n} from '@sweetoburrito/backstage-plugin-ai-assistant-node';\n\nimport {\n SchedulerServiceTaskScheduleDefinition,\n readSchedulerServiceTaskScheduleDefinitionFromConfig,\n} from '@backstage/backend-plugin-api';\n\nimport { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';\n\nconst DEFAULT_DATA_INGESTION_SCHEDULE: SchedulerServiceTaskScheduleDefinition =\n {\n frequency: {\n hours: 24,\n },\n timeout: {\n hours: 3,\n },\n };\n\nexport const createDataIngestionPipeline = ({\n config,\n logger,\n scheduler,\n ingestors,\n vectorStore,\n}: DataIngestionPipelineOptions): DataIngestionPipeline => {\n const schedule = config.has('aiAssistant.ingestion.schedule')\n ? readSchedulerServiceTaskScheduleDefinitionFromConfig(\n config.getConfig('aiAssistant.ingestion.schedule'),\n )\n : DEFAULT_DATA_INGESTION_SCHEDULE;\n\n const taskRunner = scheduler.createScheduledTaskRunner(schedule);\n\n const taskId = `ai-assistant.data-ingestion:start`;\n\n const dataIngestion = async () => {\n logger.info('Starting data ingestion...');\n\n if (ingestors.length === 0) {\n logger.warn('No ingestors available for data ingestion.');\n return;\n }\n\n logger.info(`Ingestors available: ${ingestors.map(i => i.id).join(', ')}`);\n\n for await (const ingestor of ingestors) {\n logger.info(`Running ingestor: ${ingestor.id}`);\n\n // TODO: This will cause these vectors to not be available while processing new documents\n // We should rather look at deleting a specific document from the store as it is added if the ids match\n await vectorStore.deleteDocuments({ filter: { source: ingestor.id } });\n\n const saveDocumentsBatch = async (documents: EmbeddingDocument[]) => {\n logger.info(\n `Ingested documents for ${ingestor.id}: ${documents.length}`,\n );\n\n const splitter = new RecursiveCharacterTextSplitter({\n chunkSize: 500, // TODO: Make chunk size configurable\n chunkOverlap: 50, // TODO: Make chunk overlap configurable\n });\n\n const docs = await Promise.all(\n documents.map(async document => {\n const chunks = await splitter.splitText(document.content);\n\n const chunkDocs: EmbeddingDocument[] = chunks.flatMap(\n (chunk, i) => ({\n metadata: { ...document.metadata, chunk: String(i) },\n content: chunk,\n }),\n );\n\n return chunkDocs;\n }),\n );\n\n logger.info(`Adding documents to vector store...`);\n await vectorStore.addDocuments(docs.flat());\n logger.info(`Added documents to vector store for ${ingestor.id}`);\n };\n\n const documents = await ingestor.ingest({\n saveDocumentsBatch,\n });\n\n if (documents) {\n saveDocumentsBatch(documents);\n }\n\n logger.info(`Finished processing ingestor: ${ingestor.id}`);\n }\n\n logger.info('Data ingestion completed.');\n };\n\n const start = async () => {\n taskRunner.run({\n id: taskId,\n fn: dataIngestion,\n });\n };\n\n return {\n start,\n 
};\n};\n"],"names":["readSchedulerServiceTaskScheduleDefinitionFromConfig","documents","RecursiveCharacterTextSplitter"],"mappings":";;;;;AAaA,MAAM,+BAAA,GACJ;AAAA,EACE,SAAA,EAAW;AAAA,IACT,KAAA,EAAO;AAAA,GACT;AAAA,EACA,OAAA,EAAS;AAAA,IACP,KAAA,EAAO;AAAA;AAEX,CAAA;AAEK,MAAM,8BAA8B,CAAC;AAAA,EAC1C,MAAA;AAAA,EACA,MAAA;AAAA,EACA,SAAA;AAAA,EACA,SAAA;AAAA,EACA;AACF,CAAA,KAA2D;AACzD,EAAA,MAAM,QAAA,GAAW,MAAA,CAAO,GAAA,CAAI,gCAAgC,CAAA,GACxDA,qEAAA;AAAA,IACE,MAAA,CAAO,UAAU,gCAAgC;AAAA,GACnD,GACA,+BAAA;AAEJ,EAAA,MAAM,UAAA,GAAa,SAAA,CAAU,yBAAA,CAA0B,QAAQ,CAAA;AAE/D,EAAA,MAAM,MAAA,GAAS,CAAA,iCAAA,CAAA;AAEf,EAAA,MAAM,gBAAgB,YAAY;AAChC,IAAA,MAAA,CAAO,KAAK,4BAA4B,CAAA;AAExC,IAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,MAAA,MAAA,CAAO,KAAK,4CAA4C,CAAA;AACxD,MAAA;AAAA,IACF;AAEA,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,qBAAA,EAAwB,SAAA,CAAU,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,EAAE,CAAA,CAAE,IAAA,CAAK,IAAI,CAAC,CAAA,CAAE,CAAA;AAEzE,IAAA,WAAA,MAAiB,YAAY,SAAA,EAAW;AACtC,MAAA,MAAA,CAAO,IAAA,CAAK,CAAA,kBAAA,EAAqB,QAAA,CAAS,EAAE,CAAA,CAAE,CAAA;AAI9C,MAAA,MAAM,WAAA,CAAY,gBAAgB,EAAE,MAAA,EAAQ,EAAE,MAAA,EAAQ,QAAA,CAAS,EAAA,EAAG,EAAG,CAAA;AAErE,MAAA,MAAM,kBAAA,GAAqB,OAAOC,UAAAA,KAAmC;AACnE,QAAA,MAAA,CAAO,IAAA;AAAA,UACL,CAAA,uBAAA,EAA0B,QAAA,CAAS,EAAE,CAAA,EAAA,EAAKA,WAAU,MAAM,CAAA;AAAA,SAC5D;AAEA,QAAA,MAAM,QAAA,GAAW,IAAIC,4CAAA,CAA+B;AAAA,UAClD,SAAA,EAAW,GAAA;AAAA;AAAA,UACX,YAAA,EAAc;AAAA;AAAA,SACf,CAAA;AAED,QAAA,MAAM,IAAA,GAAO,MAAM,OAAA,CAAQ,GAAA;AAAA,UACzBD,UAAAA,CAAU,GAAA,CAAI,OAAM,QAAA,KAAY;AAC9B,YAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,SAAA,CAAU,SAAS,OAAO,CAAA;AAExD,YAAA,MAAM,YAAiC,MAAA,CAAO,OAAA;AAAA,cAC5C,CAAC,OAAO,CAAA,MAAO;AAAA,gBACb,QAAA,EAAU,EAAE,GAAG,QAAA,CAAS,UAAU,KAAA,EAAO,MAAA,CAAO,CAAC,CAAA,EAAE;AAAA,gBACnD,OAAA,EAAS;AAAA,eACX;AAAA,aACF;AAEA,YAAA,OAAO,SAAA;AAAA,UACT,CAAC;AAAA,SACH;AAEA,QAAA,MAAA,CAAO,KAAK,CAAA,mCAAA,CAAqC,CAAA;AACjD,QAAA,MAAM,WAAA,CAAY,YAAA,CAAa,IAAA,CAAK,IAAA,EAAM,CAAA;AAC1C,QAAA,MAAA,CAAO,IAAA,CAAK,CAAA,oCAAA,EAAuC,QAAA,CAAS,EAAE,CAAA,CAAE,CAAA;AAAA,MAClE,CAAA;AAEA,MAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,MAAA,CAAO;AAAA,QACtC;AAAA,OACD,CAAA;AAED,MAAA,IAAI,SAAA,EAAW;AACb,QAAA,kBAAA,CAAmB,SAAS,CAAA;AAAA,MAC9B;AAEA,MAAA,MAAA,CAAO,IAAA,CAAK,CAAA,8BAAA,EAAiC,QAAA,CAAS,EAAE,CAAA,CAAE,CAAA;AAAA,IAC5D;AAEA,IAAA,MAAA,CAAO,KAAK,2BAA2B,CAAA;AAAA,EACzC,CAAA;AAEA,EAAA,MAAM,QAAQ,YAAY;AACxB,IAAA,UAAA,CAAW,GAAA,CAAI;AAAA,MACb,EAAA,EAAI,MAAA;AAAA,MACJ,EAAA,EAAI;AAAA,KACL,CAAA;AAAA,EACH,CAAA;AAEA,EAAA,OAAO;AAAA,IACL;AAAA,GACF;AACF;;;;"}
@@ -0,0 +1,41 @@
+ 'use strict';
+
+ const DEFAULT_SYSTEM_PROMPT = `
+ You are a helpful assistant that answers questions based on provided context from various documents. The context may come from sources such as internal wikis, code repositories, technical documentation, or other structured or unstructured data.
+
+ Rules:
+ 1. Always base your answers on the provided context. Do not make up information.
+ 2. When relevant, cite or reference the source information provided in the context.
+ 3. Format answers clearly and concisely. Use bullet points for lists when appropriate.
+ 4. Maintain a professional, friendly, and helpful tone.
+ 5. Return only the relevant information without any filler or unnecessary details.
+ 6. If you don't know the answer, admit it and suggest ways to find the information.
+ 7. Always return a well-structured response using markdown.
+ `;
+ const createPromptBuilder = ({
+   config
+ }) => {
+   const system = config.getOptionalString("system") || DEFAULT_SYSTEM_PROMPT;
+   const getContext = (context) => {
+     return `
+ Context:
+ ${context.map((doc) => JSON.stringify(doc)).join("\n")}
+ `;
+   };
+   const buildPrompt = (chatHistory, promptContext) => {
+     const context = getContext(promptContext);
+     return [
+       {
+         role: "system",
+         content: system.concat(context)
+       },
+       ...chatHistory
+     ];
+   };
+   return {
+     buildPrompt
+   };
+ };
+
+ exports.createPromptBuilder = createPromptBuilder;
+ //# sourceMappingURL=prompt.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"prompt.cjs.js","sources":["../../src/services/prompt.ts"],"sourcesContent":["import { RootConfigService } from '@backstage/backend-plugin-api';\nimport {\n Message,\n EmbeddingDocument,\n} from '@sweetoburrito/backstage-plugin-ai-assistant-node';\n\ntype PromptBuilderOptions = {\n config: RootConfigService;\n};\n\nexport type PromptBuilder = {\n buildPrompt: (\n chatHistory: Message[],\n promptContext: EmbeddingDocument[],\n ) => Message[];\n};\n\nconst DEFAULT_SYSTEM_PROMPT = `\nYou are a helpful assistant that answers questions based on provided context from various documents. The context may come from sources such as internal wikis, code repositories, technical documentation, or other structured or unstructured data.\n\nRules:\n1. Always base your answers on the provided context. Do not make up information.\n2. When relevant, cite or reference the source information provided in the context.\n3. Format answers clearly and concisely. Use bullet points for lists when appropriate.\n4. Maintain a professional, friendly, and helpful tone.\n5. Return only the relevant information without any filler or unnecessary details.\n6. If you don't know the answer, admit it and suggest ways to find the information.\n7. Always return a well-structured response using markdown.\n`;\n\nexport const createPromptBuilder = ({\n config,\n}: PromptBuilderOptions): PromptBuilder => {\n const system = config.getOptionalString('system') || DEFAULT_SYSTEM_PROMPT;\n\n const getContext = (context: EmbeddingDocument[]) => {\n return `\n Context:\n ${context.map(doc => JSON.stringify(doc)).join('\\n')}\n `;\n };\n\n const buildPrompt: PromptBuilder['buildPrompt'] = (\n chatHistory,\n promptContext,\n ) => {\n const context = getContext(promptContext);\n\n return [\n {\n role: 'system',\n content: system.concat(context),\n },\n ...chatHistory,\n ];\n };\n\n return {\n buildPrompt,\n };\n};\n"],"names":[],"mappings":";;AAiBA,MAAM,qBAAA,GAAwB;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAAA;AAavB,MAAM,sBAAsB,CAAC;AAAA,EAClC;AACF,CAAA,KAA2C;AACzC,EAAA,MAAM,MAAA,GAAS,MAAA,CAAO,iBAAA,CAAkB,QAAQ,CAAA,IAAK,qBAAA;AAErD,EAAA,MAAM,UAAA,GAAa,CAAC,OAAA,KAAiC;AACnD,IAAA,OAAO;AAAA;AAAA,IAAA,EAEL,OAAA,CAAQ,GAAA,CAAI,CAAA,GAAA,KAAO,IAAA,CAAK,SAAA,CAAU,GAAG,CAAC,CAAA,CAAE,IAAA,CAAK,IAAI,CAAC;AAAA,IAAA,CAAA;AAAA,EAEtD,CAAA;AAEA,EAAA,MAAM,WAAA,GAA4C,CAChD,WAAA,EACA,aAAA,KACG;AACH,IAAA,MAAM,OAAA,GAAU,WAAW,aAAa,CAAA;AAExC,IAAA,OAAO;AAAA,MACL;AAAA,QACE,IAAA,EAAM,QAAA;AAAA,QACN,OAAA,EAAS,MAAA,CAAO,MAAA,CAAO,OAAO;AAAA,OAChC;AAAA,MACA,GAAG;AAAA,KACL;AAAA,EACF,CAAA;AAEA,EAAA,OAAO;AAAA,IACL;AAAA,GACF;AACF;;;;"}
@@ -0,0 +1,68 @@
+ 'use strict';
+
+ var express = require('express');
+ var Router = require('express-promise-router');
+ var z = require('zod');
+ var validation = require('./middleware/validation.cjs.js');
+ var uuid = require('uuid');
+
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+ var express__default = /*#__PURE__*/_interopDefaultCompat(express);
+ var Router__default = /*#__PURE__*/_interopDefaultCompat(Router);
+ var z__default = /*#__PURE__*/_interopDefaultCompat(z);
+
+ async function createChatRouter(options) {
+   const { chat, httpAuth, userInfo } = options;
+   const router = Router__default.default();
+   router.use(express__default.default.json());
+   const messageSchema = z__default.default.object({
+     messages: z__default.default.array(
+       z__default.default.object({
+         id: z__default.default.string().uuid().optional().default(uuid.v4),
+         role: z__default.default.string(),
+         content: z__default.default.string()
+       })
+     ),
+     modelId: z__default.default.string(),
+     conversationId: z__default.default.string().uuid().optional().default(uuid.v4),
+     stream: z__default.default.boolean().optional().default(false)
+   });
+   router.post(
+     "/message",
+     validation.validation(messageSchema, "body"),
+     async (req, res) => {
+       const { messages, conversationId, modelId, stream } = req.body;
+       const credentials = await httpAuth.credentials(req);
+       const { userEntityRef } = await userInfo.getUserInfo(credentials);
+       const responseMessages = await chat.prompt({
+         modelId,
+         messages,
+         conversationId,
+         stream,
+         userEntityRef
+       });
+       res.json({
+         messages: responseMessages,
+         conversationId
+       });
+     }
+   );
+   const chatSchema = z__default.default.object({
+     id: z__default.default.string().uuid()
+   });
+   router.get("/", validation.validation(chatSchema, "body"), async (req, res) => {
+     const { id } = req.body;
+     const credentials = await httpAuth.credentials(req);
+     const { userEntityRef } = await userInfo.getUserInfo(credentials);
+     const conversation = await chat.getConversation({
+       conversationId: id,
+       userEntityRef
+     });
+     res.json({ conversation });
+   });
+   return router;
+ }
+
+ exports.createChatRouter = createChatRouter;
+ //# sourceMappingURL=chat.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"chat.cjs.js","sources":["../../../src/services/router/chat.ts"],"sourcesContent":["import express from 'express';\nimport Router from 'express-promise-router';\nimport { ChatService } from '../chat';\nimport z from 'zod';\nimport { validation } from './middleware/validation';\nimport { v4 as uuid } from 'uuid';\nimport {\n DatabaseService,\n HttpAuthService,\n UserInfoService,\n} from '@backstage/backend-plugin-api';\n\nexport type ChatRouterOptions = {\n chat: ChatService;\n database: DatabaseService;\n httpAuth: HttpAuthService;\n userInfo: UserInfoService;\n};\n\nexport async function createChatRouter(\n options: ChatRouterOptions,\n): Promise<express.Router> {\n const { chat, httpAuth, userInfo } = options;\n\n const router = Router();\n router.use(express.json());\n\n const messageSchema = z.object({\n messages: z.array(\n z.object({\n id: z.string().uuid().optional().default(uuid),\n role: z.string(),\n content: z.string(),\n }),\n ),\n modelId: z.string(),\n conversationId: z.string().uuid().optional().default(uuid),\n stream: z.boolean().optional().default(false),\n });\n\n router.post(\n '/message',\n validation(messageSchema, 'body'),\n async (req, res) => {\n const { messages, conversationId, modelId, stream } = req.body;\n\n const credentials = await httpAuth.credentials(req);\n const { userEntityRef } = await userInfo.getUserInfo(credentials);\n\n const responseMessages = await chat.prompt({\n modelId,\n messages,\n conversationId,\n stream,\n userEntityRef,\n });\n\n res.json({\n messages: responseMessages,\n conversationId,\n });\n },\n );\n\n const chatSchema = z.object({\n id: z.string().uuid(),\n });\n\n router.get('/', validation(chatSchema, 'body'), async (req, res) => {\n const { id } = req.body;\n\n const credentials = await httpAuth.credentials(req);\n const { userEntityRef } = await userInfo.getUserInfo(credentials);\n\n const conversation = await chat.getConversation({\n conversationId: id,\n userEntityRef,\n });\n res.json({ conversation });\n });\n\n return 
router;\n}\n"],"names":["Router","express","z","uuid","validation"],"mappings":";;;;;;;;;;;;;;AAmBA,eAAsB,iBACpB,OAAA,EACyB;AACzB,EAAA,MAAM,EAAE,IAAA,EAAM,QAAA,EAAU,QAAA,EAAS,GAAI,OAAA;AAErC,EAAA,MAAM,SAASA,uBAAA,EAAO;AACtB,EAAA,MAAA,CAAO,GAAA,CAAIC,wBAAA,CAAQ,IAAA,EAAM,CAAA;AAEzB,EAAA,MAAM,aAAA,GAAgBC,mBAAE,MAAA,CAAO;AAAA,IAC7B,UAAUA,kBAAA,CAAE,KAAA;AAAA,MACVA,mBAAE,MAAA,CAAO;AAAA,QACP,EAAA,EAAIA,mBAAE,MAAA,EAAO,CAAE,MAAK,CAAE,QAAA,EAAS,CAAE,OAAA,CAAQC,OAAI,CAAA;AAAA,QAC7C,IAAA,EAAMD,mBAAE,MAAA,EAAO;AAAA,QACf,OAAA,EAASA,mBAAE,MAAA;AAAO,OACnB;AAAA,KACH;AAAA,IACA,OAAA,EAASA,mBAAE,MAAA,EAAO;AAAA,IAClB,cAAA,EAAgBA,mBAAE,MAAA,EAAO,CAAE,MAAK,CAAE,QAAA,EAAS,CAAE,OAAA,CAAQC,OAAI,CAAA;AAAA,IACzD,QAAQD,kBAAA,CAAE,OAAA,GAAU,QAAA,EAAS,CAAE,QAAQ,KAAK;AAAA,GAC7C,CAAA;AAED,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,UAAA;AAAA,IACAE,qBAAA,CAAW,eAAe,MAAM,CAAA;AAAA,IAChC,OAAO,KAAK,GAAA,KAAQ;AAClB,MAAA,MAAM,EAAE,QAAA,EAAU,cAAA,EAAgB,OAAA,EAAS,MAAA,KAAW,GAAA,CAAI,IAAA;AAE1D,MAAA,MAAM,WAAA,GAAc,MAAM,QAAA,CAAS,WAAA,CAAY,GAAG,CAAA;AAClD,MAAA,MAAM,EAAE,aAAA,EAAc,GAAI,MAAM,QAAA,CAAS,YAAY,WAAW,CAAA;AAEhE,MAAA,MAAM,gBAAA,GAAmB,MAAM,IAAA,CAAK,MAAA,CAAO;AAAA,QACzC,OAAA;AAAA,QACA,QAAA;AAAA,QACA,cAAA;AAAA,QACA,MAAA;AAAA,QACA;AAAA,OACD,CAAA;AAED,MAAA,GAAA,CAAI,IAAA,CAAK;AAAA,QACP,QAAA,EAAU,gBAAA;AAAA,QACV;AAAA,OACD,CAAA;AAAA,IACH;AAAA,GACF;AAEA,EAAA,MAAM,UAAA,GAAaF,mBAAE,MAAA,CAAO;AAAA,IAC1B,EAAA,EAAIA,kBAAA,CAAE,MAAA,EAAO,CAAE,IAAA;AAAK,GACrB,CAAA;AAED,EAAA,MAAA,CAAO,GAAA,CAAI,KAAKE,qBAAA,CAAW,UAAA,EAAY,MAAM,CAAA,EAAG,OAAO,KAAK,GAAA,KAAQ;AAClE,IAAA,MAAM,EAAE,EAAA,EAAG,GAAI,GAAA,CAAI,IAAA;AAEnB,IAAA,MAAM,WAAA,GAAc,MAAM,QAAA,CAAS,WAAA,CAAY,GAAG,CAAA;AAClD,IAAA,MAAM,EAAE,aAAA,EAAc,GAAI,MAAM,QAAA,CAAS,YAAY,WAAW,CAAA;AAEhE,IAAA,MAAM,YAAA,GAAe,MAAM,IAAA,CAAK,eAAA,CAAgB;AAAA,MAC9C,cAAA,EAAgB,EAAA;AAAA,MAChB;AAAA,KACD,CAAA;AACD,IAAA,GAAA,CAAI,IAAA,CAAK,EAAE,YAAA,EAAc,CAAA;AAAA,EAC3B,CAAC,CAAA;AAED,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -0,0 +1,25 @@
1
+ 'use strict';
2
+
3
+ var express = require('express');
4
+ var Router = require('express-promise-router');
5
+ var chat = require('./chat.cjs.js');
6
+ var models = require('./models.cjs.js');
7
+ var rootHttpRouter = require('@backstage/backend-defaults/rootHttpRouter');
8
+
9
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
10
+
11
+ var express__default = /*#__PURE__*/_interopDefaultCompat(express);
12
+ var Router__default = /*#__PURE__*/_interopDefaultCompat(Router);
13
+
14
+ async function createRouter(options) {
15
+ const router = Router__default.default();
16
+ router.use(express__default.default.json());
17
+ router.use("/chat", await chat.createChatRouter(options));
18
+ router.use("/models", await models.createModelRouter(options));
19
+ const middleware = rootHttpRouter.MiddlewareFactory.create(options);
20
+ router.use(middleware.error());
21
+ return router;
22
+ }
23
+
24
+ exports.createRouter = createRouter;
25
+ //# sourceMappingURL=index.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.cjs.js","sources":["../../../src/services/router/index.ts"],"sourcesContent":["import express from 'express';\nimport Router from 'express-promise-router';\nimport { createChatRouter, ChatRouterOptions } from './chat';\nimport { createModelRouter } from './models';\nimport {\n LoggerService,\n RootConfigService,\n} from '@backstage/backend-plugin-api';\nimport { MiddlewareFactory } from '@backstage/backend-defaults/rootHttpRouter';\n\nexport type RouterOptions = ChatRouterOptions & {\n config: RootConfigService;\n logger: LoggerService;\n};\n\nexport async function createRouter(\n options: RouterOptions,\n): Promise<express.Router> {\n const router = Router();\n router.use(express.json());\n\n router.use('/chat', await createChatRouter(options));\n router.use('/models', await createModelRouter(options));\n\n const middleware = MiddlewareFactory.create(options);\n\n router.use(middleware.error());\n\n return router;\n}\n"],"names":["Router","express","createChatRouter","createModelRouter","MiddlewareFactory"],"mappings":";;;;;;;;;;;;;AAeA,eAAsB,aACpB,OAAA,EACyB;AACzB,EAAA,MAAM,SAASA,uBAAA,EAAO;AACtB,EAAA,MAAA,CAAO,GAAA,CAAIC,wBAAA,CAAQ,IAAA,EAAM,CAAA;AAEzB,EAAA,MAAA,CAAO,GAAA,CAAI,OAAA,EAAS,MAAMC,qBAAA,CAAiB,OAAO,CAAC,CAAA;AACnD,EAAA,MAAA,CAAO,GAAA,CAAI,SAAA,EAAW,MAAMC,wBAAA,CAAkB,OAAO,CAAC,CAAA;AAEtD,EAAA,MAAM,UAAA,GAAaC,gCAAA,CAAkB,MAAA,CAAO,OAAO,CAAA;AAEnD,EAAA,MAAA,CAAO,GAAA,CAAI,UAAA,CAAW,KAAA,EAAO,CAAA;AAE7B,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -0,0 +1,19 @@
1
+ 'use strict';
2
+
3
+ const validation = (schema, key) => {
4
+ return (req, res, next) => {
5
+ const parsed = schema.safeParse(req[key]);
6
+ if (!parsed.success) {
7
+ const errors = parsed.error.issues.map(
8
+ (issue) => `Validation Error:Field ${issue.path.join(".")} - ${issue.message}`
9
+ );
10
+ res.status(400).send({ errors });
11
+ return;
12
+ }
13
+ req[key] = parsed.data;
14
+ next();
15
+ };
16
+ };
17
+
18
+ exports.validation = validation;
19
+ //# sourceMappingURL=validation.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"validation.cjs.js","sources":["../../../../src/services/router/middleware/validation.ts"],"sourcesContent":["import express from 'express';\nimport { AnyZodObject, ZodEffects } from 'zod';\n\ntype ValidationKey = 'body' | 'query' | 'params' | 'headers';\n\nexport const validation = (\n schema: AnyZodObject | ZodEffects<AnyZodObject>,\n key: ValidationKey,\n) => {\n return (\n req: express.Request,\n res: express.Response,\n next: express.NextFunction,\n ) => {\n const parsed = schema.safeParse(req[key]);\n if (!parsed.success) {\n const errors = parsed.error.issues.map(\n issue =>\n `Validation Error:Field ${issue.path.join('.')} - ${issue.message}`,\n );\n res.status(400).send({ errors });\n return;\n }\n req[key] = parsed.data;\n next();\n };\n};\n"],"names":[],"mappings":";;AAKO,MAAM,UAAA,GAAa,CACxB,MAAA,EACA,GAAA,KACG;AACH,EAAA,OAAO,CACL,GAAA,EACA,GAAA,EACA,IAAA,KACG;AACH,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,SAAA,CAAU,GAAA,CAAI,GAAG,CAAC,CAAA;AACxC,IAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,MAAA,MAAM,MAAA,GAAS,MAAA,CAAO,KAAA,CAAM,MAAA,CAAO,GAAA;AAAA,QACjC,CAAA,KAAA,KACE,0BAA0B,KAAA,CAAM,IAAA,CAAK,KAAK,GAAG,CAAC,CAAA,GAAA,EAAM,KAAA,CAAM,OAAO,CAAA;AAAA,OACrE;AACA,MAAA,GAAA,CAAI,OAAO,GAAG,CAAA,CAAE,IAAA,CAAK,EAAE,QAAQ,CAAA;AAC/B,MAAA;AAAA,IACF;AACA,IAAA,GAAA,CAAI,GAAG,IAAI,MAAA,CAAO,IAAA;AAClB,IAAA,IAAA,EAAK;AAAA,EACP,CAAA;AACF;;;;"}
@@ -0,0 +1,23 @@
1
+ 'use strict';
2
+
3
+ var express = require('express');
4
+ var Router = require('express-promise-router');
5
+
6
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
7
+
8
+ var express__default = /*#__PURE__*/_interopDefaultCompat(express);
9
+ var Router__default = /*#__PURE__*/_interopDefaultCompat(Router);
10
+
11
+ async function createModelRouter(options) {
12
+ const { chat } = options;
13
+ const router = Router__default.default();
14
+ router.use(express__default.default.json());
15
+ router.get("/", async (_req, res) => {
16
+ const models = await chat.getAvailableModels();
17
+ res.json({ models });
18
+ });
19
+ return router;
20
+ }
21
+
22
+ exports.createModelRouter = createModelRouter;
23
+ //# sourceMappingURL=models.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"models.cjs.js","sources":["../../../src/services/router/models.ts"],"sourcesContent":["import express from 'express';\nimport Router from 'express-promise-router';\nimport { ChatService } from '../chat';\n\nexport type ModelRouterOptions = {\n chat: ChatService;\n};\n\nexport async function createModelRouter(\n options: ModelRouterOptions,\n): Promise<express.Router> {\n const { chat } = options;\n const router = Router();\n router.use(express.json());\n\n router.get('/', async (_req, res) => {\n const models = await chat.getAvailableModels();\n res.json({ models });\n });\n\n return router;\n}\n"],"names":["Router","express"],"mappings":";;;;;;;;;;AAQA,eAAsB,kBACpB,OAAA,EACyB;AACzB,EAAA,MAAM,EAAE,MAAK,GAAI,OAAA;AACjB,EAAA,MAAM,SAASA,uBAAA,EAAO;AACtB,EAAA,MAAA,CAAO,GAAA,CAAIC,wBAAA,CAAQ,IAAA,EAAM,CAAA;AAEzB,EAAA,MAAA,CAAO,GAAA,CAAI,GAAA,EAAK,OAAO,IAAA,EAAM,GAAA,KAAQ;AACnC,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,kBAAA,EAAmB;AAC7C,IAAA,GAAA,CAAI,IAAA,CAAK,EAAE,MAAA,EAAQ,CAAA;AAAA,EACrB,CAAC,CAAA;AAED,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -0,0 +1,47 @@
1
+ const TABLE_NAME = 'embeddings';
2
+
3
+ /**
4
+ *
5
+ * @param {import('knex').Knex} knex
6
+ */
7
+
8
+ exports.down = async knex => {
9
+ await knex.schema.dropTable(TABLE_NAME);
10
+ await knex.raw('drop extension if exists "uuid-ossp"');
11
+ await knex.raw('drop extension if exists "vector"');
12
+ };
13
+
14
+ /**
15
+ *
16
+ * @param {import('knex').Knex} knex
17
+ */
18
+
19
+ exports.up = async knex => {
20
+ await knex.raw('create extension if not exists "uuid-ossp"');
21
+ await knex.raw('create extension if not exists "vector"');
22
+ await knex.schema.createTable(TABLE_NAME, table => {
23
+ table.comment(
24
+ 'Stores embeddings of documents from the system to be used as RAG AI injectables. ',
25
+ );
26
+ table
27
+ .uuid('id')
28
+ .notNullable()
29
+ .primary()
30
+ .defaultTo(knex.raw('uuid_generate_v4()'))
31
+ .comment('UUID of the embedding');
32
+ table
33
+ .text('content')
34
+ .notNullable()
35
+ .comment('Actual content of the embedding. Chunks of text/data');
36
+ table
37
+ .jsonb('metadata')
38
+ .notNullable()
39
+ .comment(
40
+ 'Metadata of the embedding. Information like entityRef etc. that can be used to identify links to other parts of the system.',
41
+ );
42
+ });
43
+ await knex.schema.raw(`ALTER TABLE ${TABLE_NAME}
44
+ ADD vector vector NOT NULL ; `);
45
+ await knex.schema.raw(`COMMENT ON COLUMN ${TABLE_NAME}.vector
46
+ IS 'Vector weights of the related content.';`);
47
+ };
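The migration above only creates the table and the untyped `vector` column; similarity search happens at query time through pgvector operators. Below is a hedged sketch of such a query with knex, assuming cosine distance via pgvector's `<=>` operator; the selected columns match the table definition, but the function itself is illustrative and not part of this package.

```ts
// Sketch only: a pgvector similarity query against the embeddings table.
import { Knex } from 'knex';

export async function findSimilarEmbeddings(
  knex: Knex,
  queryVector: number[],
  limit = 5,
) {
  // pgvector accepts vector literals of the form '[0.1,0.2,...]';
  // `<=>` orders by cosine distance (smallest distance = most similar).
  const literal = `[${queryVector.join(',')}]`;
  return knex('embeddings')
    .select('id', 'content', 'metadata')
    .orderByRaw('vector <=> ?', [literal])
    .limit(limit);
}
```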
@@ -0,0 +1,44 @@
1
+ const TABLE_NAME = 'conversation';
2
+
3
+ /**
4
+ *
5
+ * @param {import('knex').Knex} knex
6
+ */
7
+ exports.down = async knex => {
8
+ await knex.schema.dropTable(TABLE_NAME);
9
+ };
10
+
11
+ /**
12
+ *
13
+ * @param {import('knex').Knex} knex
14
+ */
15
+ exports.up = async knex => {
16
+ await knex.schema.createTable(TABLE_NAME, table => {
17
+ table.comment(
18
+ 'Stores chat history for conversations with the AI assistant.',
19
+ );
20
+ table
21
+ .uuid('id')
22
+ .notNullable()
23
+ .primary()
24
+ .comment('UUID of the chat message');
25
+ table
26
+ .text('conversation_id')
27
+ .notNullable()
28
+ .comment('Identifier for the conversation this message belongs to');
29
+ table
30
+ .text('role')
31
+ .notNullable()
32
+ .comment("Role of the message sender, e.g., 'user' or 'assistant'");
33
+ table.text('content').notNullable().comment('Content of the chat message');
34
+ table
35
+ .text('userRef')
36
+ .notNullable()
37
+ .comment('Reference to the user who sent the message');
38
+ table
39
+ .timestamp('created_at')
40
+ .notNullable()
41
+ .defaultTo(knex.fn.now())
42
+ .comment('Timestamp when the message was created');
43
+ });
44
+ };
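Both migration files ship in the published package (see the `files` field in the package.json below), so they can be applied with knex's standard migration runner. A hedged sketch follows; in practice the plugin runs its own migrations against the Backstage database service, and the directory path here is an assumption.

```ts
// Sketch only: applying the packaged migrations with knex directly.
import { Knex } from 'knex';

export async function applyAiAssistantMigrations(knex: Knex) {
  await knex.migrate.latest({
    // Assumed location of the published migration files.
    directory:
      'node_modules/@sweetoburrito/backstage-plugin-ai-assistant-backend/migrations',
  });
}
```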
package/package.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "name": "@sweetoburrito/backstage-plugin-ai-assistant-backend",
3
+ "version": "0.2.0",
4
+ "license": "Apache-2.0",
5
+ "main": "dist/index.cjs.js",
6
+ "types": "dist/index.d.ts",
7
+ "publishConfig": {
8
+ "access": "public",
9
+ "main": "dist/index.cjs.js",
10
+ "types": "dist/index.d.ts"
11
+ },
12
+ "backstage": {
13
+ "role": "backend-plugin",
14
+ "pluginId": "ai-assistant",
15
+ "pluginPackages": [
16
+ "@sweetoburrito/plugin-ai-assistant-backend"
17
+ ],
18
+ "features": {
19
+ ".": "@backstage/BackendFeature"
20
+ }
21
+ },
22
+ "scripts": {
23
+ "start": "backstage-cli package start",
24
+ "build": "backstage-cli package build",
25
+ "lint": "backstage-cli package lint",
26
+ "test": "backstage-cli package test",
27
+ "clean": "backstage-cli package clean",
28
+ "prepack": "backstage-cli package prepack",
29
+ "postpack": "backstage-cli package postpack"
30
+ },
31
+ "dependencies": {
32
+ "@backstage/backend-defaults": "backstage:^",
33
+ "@backstage/backend-plugin-api": "backstage:^",
34
+ "@backstage/catalog-client": "backstage:^",
35
+ "@backstage/errors": "backstage:^",
36
+ "@langchain/core": "^0.3.72",
37
+ "@langchain/textsplitters": "^0.1.0",
38
+ "@sweetoburrito/backstage-plugin-ai-assistant-node": "workspace:^",
39
+ "express": "^4.17.1",
40
+ "express-promise-router": "^4.1.0",
41
+ "knex": "^3.1.0",
42
+ "uuid": "^11.1.0",
43
+ "zod": "^3.22.4"
44
+ },
45
+ "devDependencies": {
46
+ "@backstage/backend-test-utils": "backstage:^",
47
+ "@backstage/cli": "backstage:^",
48
+ "@backstage/types": "backstage:^",
49
+ "@types/express": "^4.0.0",
50
+ "@types/supertest": "^2.0.12",
51
+ "supertest": "^6.2.4"
52
+ },
53
+ "configSchema": "config.d.ts",
54
+ "files": [
55
+ "dist",
56
+ "migrations",
57
+ "config.d.ts"
58
+ ],
59
+ "typesVersions": {
60
+ "*": {
61
+ "package.json": [
62
+ "package.json"
63
+ ]
64
+ }
65
+ }
66
+ }