@exulu/backend 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +71 -0
- package/dist/index.cjs +3763 -0
- package/dist/index.d.cts +450 -0
- package/dist/index.d.ts +450 -0
- package/dist/index.js +3721 -0
- package/git-conventional-commits.yaml +43 -0
- package/license.md +81 -0
- package/package.json +82 -0
- package/types/enums/field-types.ts +1 -0
- package/types/enums/statistics.ts +13 -0
- package/types/models/agent-backend.ts +15 -0
- package/types/models/agent-session.ts +17 -0
- package/types/models/agent.ts +23 -0
- package/types/models/context.ts +35 -0
- package/types/models/embedder-backend.ts +15 -0
- package/types/models/embedding.ts +17 -0
- package/types/models/item.ts +21 -0
- package/types/models/job.ts +8 -0
- package/types/models/tool.ts +8 -0
- package/types/models/user-role.ts +6 -0
- package/types/models/user.ts +10 -0
- package/types/models/vector-methods.ts +10 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,3763 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// Public export surface of @exulu/backend. Each entry is installed as a lazy
// getter by __export, so it is safe to reference names that are declared
// later in this bundle.
var index_exports = {};
__export(index_exports, {
  EXULU_STATISTICS_TYPE_ENUM: () => STATISTICS_TYPE_ENUM,
  ExuluAgent: () => ExuluAgent,
  ExuluApp: () => ExuluApp,
  ExuluAuthentication: () => authentication,
  ExuluContext: () => ExuluContext,
  ExuluDatabase: () => ExuluDatabase,
  ExuluEmbedder: () => ExuluEmbedder,
  ExuluJobs: () => ExuluJobs,
  ExuluQueues: () => queues,
  ExuluSource: () => ExuluSource,
  ExuluTool: () => ExuluTool,
  ExuluWorkflow: () => ExuluWorkflow,
  ExuluZodFileType: () => ExuluZodFileType
});
module.exports = __toCommonJS(index_exports);
|
|
48
|
+
|
|
49
|
+
// src/redis/client.ts
|
|
50
|
+
var import_redis = require("redis");
|
|
51
|
+
|
|
52
|
+
// src/bullmq/server.ts
|
|
53
|
+
// src/bullmq/server.ts
// Connection details for the Redis backend, read from the environment.
// NOTE(review): REDIS_HOST is stringified, so a missing variable becomes the
// literal string "undefined" instead of failing fast — confirm intended.
var redisServer = {
  host: `${process.env.REDIS_HOST}`,
  port: process.env.REDIS_PORT
};

// src/redis/client.ts
// Shared Redis client, connected once at module load by the IIFE below.
// Stays `null` until the connection attempt has started.
var redisClient = null;
(async () => {
  if (!redisClient) {
    const url = `redis://${redisServer.host}:${redisServer.port}`;
    console.log(`[EXULU] connecting to redis.`);
    redisClient = (0, import_redis.createClient)({
      // todo add password
      url
    });
    await redisClient.connect();
  }
})().catch((err) => {
  // BUG FIX: this floating async IIFE previously had no rejection handler,
  // so an unreachable Redis crashed the process with an unhandled rejection
  // at import time. Log instead; consumers still see a failed client.
  console.error("[EXULU] failed to connect to redis:", err);
});
|
|
71
|
+
|
|
72
|
+
// src/bullmq/validators.ts
|
|
73
|
+
/**
 * Validates the shape of a BullMQ job payload before it is processed.
 *
 * Required on job.data: `type` ("embedder" | "workflow"), `function`
 * (workflow: "execute"; embedder: "upsert" | "delete" | "retrieve"),
 * `inputs`, and `id`.
 *
 * @param job - BullMQ job whose `data` payload is checked
 * @returns the same job object when valid
 * @throws {Error} describing the first missing or invalid property
 */
var validateJob = (job) => {
  if (!job.data) {
    throw new Error(`Missing job data for job ${job.id}.`);
  }
  if (!job.data.type) {
    throw new Error(`Missing property "type" in data for job ${job.id}.`);
  }
  if (!job.data.function) {
    throw new Error(`Missing property "function" in data for job ${job.id}.`);
  }
  if (!job.data.inputs) {
    throw new Error(`Missing property "inputs" in data for job ${job.id}.`);
  }
  if (job.data.type !== "embedder" && job.data.type !== "workflow") {
    // BUG FIX: the message previously said `"embedder" or "agent"`, but the
    // condition accepts "embedder" / "workflow" — the message now matches.
    throw new Error(`Property "type" in data for job ${job.id} must be of value "embedder" or "workflow".`);
  }
  if (job.data.type === "workflow" && job.data.function !== "execute") {
    throw new Error(`Property "function" in data for job ${job.id} must be of value "execute" when using type "workflow".`);
  }
  if (job.data.type === "embedder" && job.data.function !== "upsert" && job.data.function !== "delete" && job.data.function !== "retrieve") {
    throw new Error(`Property "function" in data for job ${job.id} must be of value "upsert", "delete" or "retrieve" when using type "embedder".`);
  }
  if (!job.data.id) {
    throw new Error(`Property "id" in data for job ${job.id} missing.`);
  }
  return job;
};
|
|
100
|
+
|
|
101
|
+
// src/postgres/client.ts
|
|
102
|
+
var import_knex = __toESM(require("knex"), 1);
|
|
103
|
+
var import_knex2 = require("knex");
|
|
104
|
+
var import_knex3 = require("pgvector/knex");
|
|
105
|
+
// Cache of knex connections keyed by database name; currently only "exulu".
var db = {};
/**
 * Returns a lazily-created, shared knex connection to the "exulu" Postgres
 * database. On first call it also ensures the pgvector extension exists.
 * Connection settings come from the POSTGRES_DB_* environment variables.
 *
 * @returns {Promise<{db: object}>} object holding the shared knex instance
 */
async function postgresClient() {
  if (!db["exulu"]) {
    const knex = (0, import_knex.default)({
      client: "pg",
      connection: {
        host: process.env.POSTGRES_DB_HOST,
        port: parseInt(process.env.POSTGRES_DB_PORT || "5432"),
        user: process.env.POSTGRES_DB_USER,
        database: "exulu",
        password: process.env.POSTGRES_DB_PASSWORD,
        // NOTE(review): SSL mode disables certificate validation
        // (rejectUnauthorized: false) — confirm acceptable for production.
        ssl: process.env.POSTGRES_DB_SSL === "true" ? { rejectUnauthorized: false } : false
      }
    });
    // createExtensionIfNotExists is contributed to knex's schema builder by
    // the pgvector/knex import above (import_knex3).
    await knex.schema.createExtensionIfNotExists("vector");
    db["exulu"] = knex;
  }
  return {
    db: db["exulu"]
  };
}
|
|
126
|
+
|
|
127
|
+
// src/postgres/core-schema.ts
// Declarative column definitions for the core tables. Each schema carries the
// table name (plural), the record name (singular) and a list of fields that
// the init-db migration translates into columns via mapType().

// Application users. The id/name/email/emailVerified/image columns are created
// explicitly by the migration and skipped when this field list is applied.
var usersSchema = {
  name: { plural: "users", singular: "user" },
  fields: [
    { name: "firstname", type: "text" },
    { name: "lastname", type: "text" },
    { name: "email", type: "text", index: true },
    { name: "temporary_token", type: "text" },
    { name: "type", type: "text", index: true },
    { name: "profile_image", type: "text" },
    { name: "super_admin", type: "boolean", default: false },
    { name: "status", type: "text" },
    { name: "emailVerified", type: "text" },
    { name: "apikey", type: "text" },
    { name: "role", type: "reference", references: { table: "roles", field: "id", onDelete: "CASCADE" } },
    { name: "last_used", type: "date" }
  ]
};

// Authorization roles assignable to users.
var rolesSchema = {
  name: { plural: "roles", singular: "role" },
  fields: [
    { name: "name", type: "text" },
    { name: "is_admin", type: "boolean", default: false },
    { name: "agents", type: "json" }
  ]
};

// Usage statistics: a running total plus a JSON timeseries per metric.
var statisticsSchema = {
  name: { plural: "statistics", singular: "statistic" },
  fields: [
    { name: "name", type: "text" },
    { name: "label", type: "text" },
    { name: "type", type: "text" },
    { name: "total", type: "number" },
    { name: "timeseries", type: "json" }
  ]
};

// Persisted job records mirroring BullMQ jobs ("redis" holds the BullMQ id).
var jobsSchema = {
  name: { plural: "jobs", singular: "job" },
  fields: [
    { name: "redis", type: "text" },
    { name: "session", type: "text" },
    { name: "status", type: "text" },
    { name: "type", type: "text" },
    { name: "result", type: "text" },
    { name: "name", type: "text" },
    { name: "agent", type: "text" },
    { name: "user", type: "text" },
    { name: "item", type: "text" },
    { name: "inputs", type: "json" },
    { name: "finished_at", type: "date" },
    { name: "duration", type: "number" }
  ]
};

// Registered agents and their stored configuration/flags.
var agentsSchema = {
  name: { plural: "agents", singular: "agent" },
  fields: [
    { name: "name", type: "text" },
    { name: "description", type: "text" },
    { name: "extensions", type: "json" },
    { name: "backend", type: "text" },
    { name: "type", type: "text" },
    { name: "active", type: "boolean", default: false },
    { name: "public", type: "boolean", default: false },
    { name: "tools", type: "json" }
  ]
};
|
|
339
|
+
|
|
340
|
+
// src/registry/utils/map-types.ts
|
|
341
|
+
/**
 * Maps a schema field type onto the matching knex column-builder call.
 *
 * @param t - knex table builder
 * @param type - schema field type ("text", "longText", "shortText", "number",
 *   "boolean", "code", "json", "date", "uuid")
 * @param name - column name to create
 * @param defaultValue - default for boolean columns (falls back to false)
 * @throws {Error} for any unrecognized type
 */
var mapType = (t, type, name, defaultValue) => {
  switch (type) {
    case "text":
      t.string(name, 255);
      return;
    case "shortText":
      t.string(name, 100);
      return;
    // longText and code both map to an unbounded text column.
    case "longText":
    case "code":
      t.text(name);
      return;
    case "number":
      t.float(name);
      return;
    case "boolean":
      t.boolean(name).defaultTo(defaultValue || false);
      return;
    case "json":
      t.jsonb(name);
      return;
    case "date":
      t.date(name);
      return;
    case "uuid":
      t.uuid(name);
      return;
    default:
      throw new Error("Invalid type: " + type);
  }
};
|
|
380
|
+
|
|
381
|
+
// src/registry/utils/sanitize-name.ts
|
|
382
|
+
// Normalizes a display name into a column-safe identifier: lower-cases it
// and swaps every space for an underscore.
var sanitizeName = (name) => name.toLowerCase().replaceAll(" ", "_");
|
|
385
|
+
|
|
386
|
+
// src/postgres/init-db.ts
|
|
387
|
+
/**
 * Adds the columns described by a schema's field definitions to a knex
 * table builder.
 *
 * @param table - knex table builder (inside createTable)
 * @param fields - schema field list (see core-schema definitions)
 * @param skip - field names already created explicitly on the table
 * @throws {Error} when a "reference" field lacks a references definition
 */
var addSchemaFields = (table, fields, skip = []) => {
  for (const field of fields) {
    const { type, name, references, default: defaultValue } = field;
    if (!type || !name) {
      continue;
    }
    if (skip.includes(name)) {
      continue;
    }
    if (type === "reference") {
      if (!references) {
        throw new Error("Field with type reference must have a reference definition.");
      }
      // NOTE(review): references.onDelete is declared in the schemas but not
      // applied here — confirm whether ON DELETE behavior should be set.
      table.uuid(name).references(references.field).inTable(references.table);
      // BUG FIX: this branch previously used `return`, which exited the whole
      // table-builder callback and silently skipped every field after the
      // first reference (e.g. users.last_used was never created).
      continue;
    }
    mapType(table, type, sanitizeName(name), defaultValue);
  }
};

/**
 * Idempotent migration that creates all core Exulu tables (roles, statistics,
 * jobs, agents, verification_token, users, accounts, sessions). Each table is
 * only created when it does not already exist.
 *
 * @param knex - connected knex instance
 */
var up = async function(knex) {
  if (!await knex.schema.hasTable("roles")) {
    await knex.schema.createTable("roles", (table) => {
      table.uuid("id").primary().defaultTo(knex.fn.uuid());
      table.date("createdAt").defaultTo(knex.fn.now());
      table.date("updatedAt").defaultTo(knex.fn.now());
      addSchemaFields(table, rolesSchema.fields);
    });
  }
  if (!await knex.schema.hasTable("statistics")) {
    await knex.schema.createTable("statistics", (table) => {
      table.uuid("id").primary().defaultTo(knex.fn.uuid());
      table.date("createdAt").defaultTo(knex.fn.now());
      table.date("updatedAt").defaultTo(knex.fn.now());
      addSchemaFields(table, statisticsSchema.fields);
    });
  }
  if (!await knex.schema.hasTable("jobs")) {
    await knex.schema.createTable("jobs", (table) => {
      // Jobs use an auto-incrementing integer id (unlike roles/statistics).
      table.increments("id").primary();
      table.date("createdAt").defaultTo(knex.fn.now());
      table.date("updatedAt").defaultTo(knex.fn.now());
      addSchemaFields(table, jobsSchema.fields);
    });
  }
  if (!await knex.schema.hasTable("agents")) {
    await knex.schema.createTable("agents", (table) => {
      table.increments("id").primary();
      table.date("createdAt").defaultTo(knex.fn.now());
      table.date("updatedAt").defaultTo(knex.fn.now());
      addSchemaFields(table, agentsSchema.fields);
    });
  }
  if (!await knex.schema.hasTable("verification_token")) {
    await knex.schema.createTable("verification_token", (table) => {
      table.text("identifier").notNullable();
      table.timestamp("expires", { useTz: true }).notNullable();
      table.text("token").notNullable();
      table.primary(["identifier", "token"]);
    });
  }
  if (!await knex.schema.hasTable("users")) {
    await knex.schema.createTable("users", (table) => {
      table.increments("id").primary();
      table.date("createdAt").defaultTo(knex.fn.now());
      table.date("updatedAt").defaultTo(knex.fn.now());
      table.string("name", 255);
      table.string("password", 255);
      table.string("email", 255);
      table.timestamp("emailVerified", { useTz: true });
      table.text("image");
      // The columns above are created explicitly, so they are excluded when
      // applying the shared schema definition.
      addSchemaFields(table, usersSchema.fields, ["id", "name", "email", "emailVerified", "image"]);
    });
  }
  if (!await knex.schema.hasTable("accounts")) {
    await knex.schema.createTable("accounts", (table) => {
      table.increments("id").primary();
      table.integer("userId").notNullable();
      table.string("type", 255).notNullable();
      table.string("provider", 255).notNullable();
      table.string("providerAccountId", 255).notNullable();
      table.text("refresh_token");
      table.text("access_token");
      table.bigInteger("expires_at");
      table.text("id_token");
      table.text("scope");
      table.text("session_state");
      table.text("token_type");
    });
  }
  if (!await knex.schema.hasTable("sessions")) {
    await knex.schema.createTable("sessions", (table) => {
      table.increments("id").primary();
      table.integer("userId").notNullable();
      table.timestamp("expires", { useTz: true }).notNullable();
      table.string("sessionToken", 255).notNullable();
    });
  }
};
|
|
534
|
+
/**
 * One-time bootstrap: runs the schema migration and seeds the default
 * "admin" role and the default admin user (admin@exulu.com). Safe to call
 * repeatedly — existing role/user rows are reused, not duplicated.
 */
var execute = async () => {
  console.log("[EXULU] Initializing database.");
  const { db: db2 } = await postgresClient();
  await up(db2);
  console.log("[EXULU] Inserting default user and admin role.");
  // Reuse the admin role if a previous run already created it.
  const existingRole = await db2.from("roles").where({ name: "admin" }).first();
  let roleId;
  if (!existingRole) {
    console.log("[EXULU] Creating default admin role.");
    const role = await db2.from("roles").insert({
      name: "admin",
      is_admin: true,
      agents: []
    }).returning("id");
    roleId = role[0].id;
  } else {
    roleId = existingRole.id;
  }
  // Seed the default admin user only when it does not exist yet.
  const existingUser = await db2.from("users").where({ email: "admin@exulu.com" }).first();
  if (!existingUser) {
    console.log("[EXULU] Creating default admin user.");
    await db2.from("users").insert({
      name: "exulu",
      email: "admin@exulu.com",
      super_admin: true,
      createdAt: /* @__PURE__ */ new Date(),
      updatedAt: /* @__PURE__ */ new Date(),
      // password: "admin", todo add this again when we implement password auth / encryption as alternative to OTP
      role: roleId
    });
  }
  console.log("[EXULU] Database initialized.");
  return;
};
|
|
568
|
+
|
|
569
|
+
// src/registry/classes.ts
|
|
570
|
+
var import_zod = require("zod");
|
|
571
|
+
var import_bullmq2 = require("bullmq");
|
|
572
|
+
var import_core = require("@mastra/core");
|
|
573
|
+
var import_zod2 = require("zod");
|
|
574
|
+
var fs = __toESM(require("fs"), 1);
|
|
575
|
+
var path = __toESM(require("path"), 1);
|
|
576
|
+
var import_bullmq3 = require("bullmq");
|
|
577
|
+
var import_memory = require("@mastra/memory");
|
|
578
|
+
var import_pg = require("@mastra/pg");
|
|
579
|
+
|
|
580
|
+
// types/enums/statistics.ts
// Event names used when recording usage statistics, in the form
// "<subject>.<action>". Exported publicly as EXULU_STATISTICS_TYPE_ENUM.
var STATISTICS_TYPE_ENUM = {
  CONTEXT_RETRIEVE: "context.retrieve",
  SOURCE_UPDATE: "source.update",
  EMBEDDER_UPSERT: "embedder.upsert",
  EMBEDDER_GENERATE: "embedder.generate",
  EMBEDDER_DELETE: "embedder.delete",
  WORKFLOW_RUN: "workflow.run",
  CONTEXT_UPSERT: "context.upsert",
  TOOL_CALL: "tool.call",
  AGENT_RUN: "agent.run"
};
|
|
592
|
+
|
|
593
|
+
// src/registry/classes.ts
|
|
594
|
+
var import_knex4 = __toESM(require("pgvector/knex"), 1);
|
|
595
|
+
|
|
596
|
+
// src/registry/decoraters/bullmq.ts
|
|
597
|
+
var import_bullmq = require("bullmq");
|
|
598
|
+
var import_uuid = require("uuid");
|
|
599
|
+
/**
 * Schedules a job on a BullMQ queue and mirrors it into the Postgres "jobs"
 * table so it can be listed and updated outside of Redis.
 *
 * Returns the BullMQ job spread together with:
 *   - id:    the Postgres row id of the mirrored job record
 *   - redis: the BullMQ job id (a freshly generated UUID)
 *
 * NOTE(review): the queue job name is `${embedder || workflow}`, so exactly
 * one of `embedder` / `workflow` appears to be expected — confirm with callers.
 */
var bullmqDecorator = async ({
  label,
  type,
  workflow,
  embedder,
  inputs,
  queue,
  user,
  agent,
  session,
  configuration,
  updater,
  context,
  source,
  documents,
  trigger,
  item
}) => {
  const redisId = (0, import_uuid.v4)();
  // Enqueue in Redis first; optional payload keys are included only when set.
  const job = await queue.add(
    `${embedder || workflow}`,
    {
      type: `${type}`,
      ...embedder && { embedder },
      ...workflow && { workflow },
      ...configuration && { configuration },
      ...updater && { updater },
      ...context && { context },
      ...source && { source },
      ...documents && { documents },
      ...trigger && { trigger },
      ...item && { item },
      agent,
      user,
      inputs,
      session
    },
    {
      jobId: redisId
    }
  );
  const { db: db2 } = await postgresClient();
  const now = /* @__PURE__ */ new Date();
  console.log("[EXULU] scheduling new job", inputs);
  // Mirror of the queued job; "redis" (the BullMQ id) is the conflict key.
  // NOTE(review): some of these optional keys (embedder, workflow, context,
  // source, documents, trigger, configuration, updater) have no matching
  // column in jobsSchema as defined in this bundle — confirm the table schema.
  const insertData = {
    name: `${label}`,
    redis: job.id,
    status: "waiting",
    type,
    inputs,
    agent,
    item,
    createdAt: now,
    updatedAt: now,
    user,
    session,
    ...embedder && { embedder },
    ...workflow && { workflow },
    ...configuration && { configuration },
    ...updater && { updater },
    ...context && { context },
    ...source && { source },
    // Only document ids are persisted, not the full documents.
    ...documents && { documents: documents.map((doc2) => doc2.id) },
    ...trigger && { trigger }
  };
  await db2("jobs").insert(insertData).onConflict("redis").merge({
    ...insertData,
    updatedAt: now
    // Only updatedAt changes on updates
  });
  // Re-read the mirrored row to obtain the Postgres id for the caller.
  const doc = await db2.from("jobs").where({ redis: job.id }).first();
  if (!doc?.id) {
    throw new Error("Failed to get job ID after insert/update");
  }
  console.log("[EXULU] created job", doc?.id);
  return {
    ...job,
    id: doc?.id,
    redis: job.id
  };
};
|
|
680
|
+
|
|
681
|
+
// src/registry/classes.ts
|
|
682
|
+
/**
 * Turns an arbitrary display name into a URL-safe slug, e.g.
 * "Café au lait!" -> "cafe-au-lait".
 *
 * @param {string} name - display name to slugify
 * @returns {string} lowercase, dash-separated slug
 */
function generateSlug(name) {
  // Strip diacritics: decompose to NFKD, then drop the combining marks.
  const ascii = name.normalize("NFKD").replace(/[\u0300-\u036f]/g, "");
  // Collapse each run of non-alphanumeric characters into a single dash,
  // then trim any leading/trailing dashes.
  return ascii
    .toLowerCase()
    .replace(/[\W_]+/g, "-")
    .replace(/^-+|-+$/g, "");
}
|
|
688
|
+
/**
 * Builds a zod object schema describing a single file-upload field. The field
 * key is prefixed with "exulu_file_" and its describe() payload carries the
 * UI metadata (label, description, allowed file types) as JSON.
 */
var ExuluZodFileType = ({ name, label, description, allowedFileTypes }) => {
  const fieldKey = `exulu_file_${name}`;
  const metadata = JSON.stringify({
    label,
    isFile: true,
    description,
    allowedFileTypes
  });
  return import_zod2.z.object({
    [fieldKey]: import_zod2.z.string().describe(metadata)
  });
};
|
|
703
|
+
/**
 * Registers an agent definition with the Exulu runtime and builds a Mastra
 * Agent from it on demand (see chat()). When config.memory is set, a
 * Postgres-backed Mastra Memory is attached, optionally with pgvector-based
 * semantic recall.
 */
var ExuluAgent = class {
  id;
  name;
  description = "";
  slug = "";
  streaming = false;
  type;
  outputSchema;
  rateLimit;
  config;
  memory;
  tools;
  capabilities;
  constructor({ id, name, description, outputSchema, config, rateLimit, type, capabilities, tools }) {
    this.id = id;
    this.name = name;
    this.type = type;
    this.description = description;
    this.outputSchema = outputSchema;
    this.rateLimit = rateLimit;
    this.tools = tools;
    this.config = config;
    this.capabilities = capabilities;
    // HTTP route under which this agent is exposed.
    this.slug = `/agents/${generateSlug(this.name)}/run`;
    if (config?.memory) {
      const connectionString = `postgresql://${process.env.POSTGRES_DB_USER}:${process.env.POSTGRES_DB_PASSWORD}@${process.env.POSTGRES_DB_HOST}:${process.env.POSTGRES_DB_PORT}/exulu`;
      this.memory = new import_memory.Memory({
        storage: new import_pg.PostgresStore({
          host: process.env.POSTGRES_DB_HOST || "",
          port: parseInt(process.env.POSTGRES_DB_PORT || "5432"),
          user: process.env.POSTGRES_DB_USER || "",
          database: "exulu",
          // putting it into an own database that is not managed by exulu
          password: process.env.POSTGRES_DB_PASSWORD || "",
          ssl: process.env.POSTGRES_DB_SSL === "true" ? { rejectUnauthorized: false } : false
        }),
        ...config?.memory.vector ? { vector: new import_pg.PgVector(connectionString) } : {},
        options: {
          lastMessages: config?.memory.lastMessages || 10,
          semanticRecall: {
            // BUG FIX: guarded with ?. — a memory config without a
            // semanticRecall object previously threw a TypeError here.
            topK: config?.memory.semanticRecall?.topK || 3,
            messageRange: config?.memory.semanticRecall?.messageRange || 2
          }
        }
      });
    }
  }
  /**
   * Loads the persisted agent row and returns a configured Mastra Agent.
   * Only tools that are both listed on the stored agent row and registered
   * on this instance are passed through.
   *
   * @param id - primary key of the agents table row
   * @throws {Error} when no agent row matches `id`
   */
  chat = async (id) => {
    const { db: db2 } = await postgresClient();
    const agent = await db2.from("agents").select("*").where("id", "=", id).first();
    if (!agent) {
      throw new Error("Agent not found");
    }
    let tools = {};
    agent.tools?.forEach(({ name }) => {
      const tool = this.tools?.find((t) => t.name === name);
      if (!tool) {
        return;
      }
      // BUG FIX: the resolved tool was previously returned from the forEach
      // callback (a no-op), so the agent always received an empty tool set.
      // Keyed by tool name — presumably matching Mastra's record-of-tools
      // shape; TODO confirm against @mastra/core's Agent tools type.
      tools[name] = tool;
    });
    updateStatistic({
      name: "count",
      label: this.name,
      type: STATISTICS_TYPE_ENUM.AGENT_RUN,
      trigger: "agent"
    });
    return new import_core.Agent({
      name: this.config.name,
      instructions: this.config.instructions,
      model: this.config.model,
      tools,
      memory: this.memory ? this.memory : void 0
    });
  };
};
|
|
779
|
+
/**
 * Registers an embedder with the Exulu runtime. An embedder turns queries or
 * documents into vector embeddings via the injected generateEmbeddings
 * function, optionally chunking documents first with the injected chunker.
 */
var ExuluEmbedder = class {
  id;
  name;
  slug = "";
  queue;
  generateEmbeddings;
  vectorDimensions;
  maxChunkSize;
  chunker;
  // NOTE(review): `description` is accepted but never stored on the instance,
  // unlike the other registry classes — confirm whether it should be kept.
  constructor({ id, name, description, generateEmbeddings, queue, vectorDimensions, maxChunkSize, chunker }) {
    this.id = id;
    this.name = name;
    this.vectorDimensions = vectorDimensions;
    this.maxChunkSize = maxChunkSize;
    this.chunker = chunker;
    // HTTP route under which this embedder is exposed.
    this.slug = `/embedders/${generateSlug(this.name)}/run`;
    this.queue = queue;
    this.generateEmbeddings = generateEmbeddings;
  }
  /**
   * Embeds a free-text query as a single chunk (with a placeholder item id).
   *
   * @param query - text to embed
   * @param statistics - currently unused; kept for interface compatibility
   *   (the original empty `if (statistics) {}` dead block was removed).
   */
  async generateFromQuery(query, statistics) {
    return await this.generateEmbeddings({
      item: {
        id: "placeholder"
      },
      chunks: [{
        content: query,
        index: 1
      }]
    });
  }
  /**
   * Chunks a document with the configured chunker, then embeds the chunks.
   *
   * @param input - document/item to embed; must carry an `id`
   * @param statistics - currently unused; kept for interface compatibility
   * @throws {Error} when no chunker is configured or `input.id` is missing
   */
  async generateFromDocument(input, statistics) {
    if (!this.chunker) {
      throw new Error("Chunker not found for embedder " + this.name);
    }
    console.log("generating chunks");
    if (!input.id) {
      throw new Error("Item id is required for generating embeddings.");
    }
    const output = await this.chunker(input, this.maxChunkSize);
    console.log("generating embeddings");
    return await this.generateEmbeddings(output);
  }
};
|
|
826
|
+
/**
 * Registers a workflow definition with the Exulu runtime: metadata, the
 * BullMQ queue it runs on, its input schema, and the workflow callable.
 */
var ExuluWorkflow = class {
  id;
  name;
  description = "";
  enable_batch = false;
  slug = "";
  queue;
  workflow;
  inputSchema;
  constructor({ id, name, description, workflow, queue, enable_batch, inputSchema }) {
    this.id = id;
    this.name = name;
    this.description = description;
    this.enable_batch = enable_batch;
    this.inputSchema = inputSchema;
    this.workflow = workflow;
    this.queue = queue;
    // HTTP route under which this workflow can be triggered.
    this.slug = `/workflows/${generateSlug(this.name)}/run`;
  }
};
|
|
846
|
+
/**
 * Per-job file logger. Each instance appends timestamped lines to its own
 * log file named "<jobId>_<iso-timestamp>.txt" inside `logsDir` (created on
 * demand).
 */
var ExuluLogger = class {
  logPath;
  job;
  constructor(job, logsDir) {
    this.job = job;
    if (!fs.existsSync(logsDir)) {
      fs.mkdirSync(logsDir, { recursive: true });
    }
    // Colons and dots are not filename-safe everywhere, so replace them.
    const stamp = new Date().toISOString().replace(/[:.]/g, "-");
    this.logPath = path.join(logsDir, `${job.id}_${stamp}.txt`);
  }
  /**
   * Appends one log line, ensuring a trailing newline.
   * @param {string} message - text to log
   * @param {string} level - severity tag embedded in the line prefix
   * @throws rethrows any filesystem error after logging it to the console
   */
  async write(message, level) {
    const logMessage = message.endsWith("\n") ? message : `${message}\n`;
    try {
      await fs.promises.appendFile(this.logPath, `[EXULU][${level}] - ${new Date().toISOString()}: ${logMessage}`);
    } catch (error) {
      console.error(`Error writing to log file ${this.job.id}:`, error);
      throw error;
    }
  }
};
|
|
866
|
+
var ExuluTool = class {
  id;
  name;
  description;
  inputSchema;
  outputSchema;
  type;
  _execute;
  /**
   * A callable tool exposed to agents. `inputSchema`/`outputSchema` are zod
   * schemas describing the contract; `type` tags the tool's origin (e.g.
   * "context"). The underlying implementation is stored as `_execute`.
   */
  constructor({ id, name, description, inputSchema, outputSchema, type, execute: execute2 }) {
    this.id = id;
    this.name = name;
    this.description = description;
    this.inputSchema = inputSchema;
    this.outputSchema = outputSchema;
    this.type = type;
    this._execute = execute2;
  }
  /**
   * Runs the tool with `inputs`, recording a usage statistic as a side effect.
   * @throws {Error} when the tool was registered without an execute function.
   */
  execute = async (inputs) => {
    if (!this._execute) {
      throw new Error("Tool has no execute function.");
    }
    // Fire-and-forget usage counter. FIX: the promise was previously left
    // floating — a failed statistics write would surface as an unhandled
    // rejection instead of being ignored; it must never fail the tool call.
    updateStatistic({
      name: "count",
      label: this.name,
      type: STATISTICS_TYPE_ENUM.TOOL_CALL,
      trigger: "agent"
    }).catch((error) => {
      console.error(`[EXULU] failed to record statistic for tool "${this.name}":`, error);
    });
    return await this._execute(inputs);
  };
};
|
|
896
|
+
var ExuluContext = class {
  id;
  name;
  active;
  fields;
  rateLimit;
  description;
  embedder;
  queryRewriter;
  resultReranker;
  // todo typings
  _sources = [];
  configuration;
  /**
   * A context is a named, embeddable collection of items backed by two
   * Postgres tables: "<sanitized name>_items" (the documents) and
   * "<sanitized name>_chunks" (text chunks plus their pgvector embeddings,
   * produced by `embedder`).
   *
   * `configuration.calculateVectors` decides when embeddings are (re)computed:
   * "manual" (default), "onInsert", "onUpdate" or "always".
   */
  constructor({ id, name, description, embedder, active, rateLimit, fields, queryRewriter, resultReranker, configuration }) {
    this.id = id;
    this.name = name;
    this.fields = fields || [];
    this.configuration = configuration || {
      calculateVectors: "manual"
    };
    this.description = description;
    this.embedder = embedder;
    this.active = active;
    this.rateLimit = rateLimit;
    this._sources = [];
    this.queryRewriter = queryRewriter;
    this.resultReranker = resultReranker;
  }
  // NOTE(review): deletion is not implemented yet — both methods are no-op stubs.
  deleteOne = async (id) => {
    return {};
  };
  deleteAll = async () => {
    return {};
  };
  // Postgres table holding this context's items.
  getTableName = () => {
    return sanitizeName(this.name) + "_items";
  };
  // Postgres table holding this context's embedded chunks.
  getChunksTableName = () => {
    return sanitizeName(this.name) + "_chunks";
  };
  // True when the items table has been created.
  tableExists = async () => {
    const { db: db2 } = await postgresClient();
    return await db2.schema.hasTable(this.getTableName());
  };
  /**
   * Rejects items carrying keys that are neither built-in columns nor declared
   * context fields. `action` ("insert"/"update") only affects the error text.
   * @throws {Error} on the first unknown key.
   */
  #assertKnownFields(item, action) {
    const builtins = ["name", "description", "external_id", "tags", "source", "textLength", "upsert"];
    Object.keys(item).forEach((key) => {
      if (builtins.includes(key)) {
        return;
      }
      const known = this.fields.find((field) => field.name === key);
      if (!known) {
        // FIX: error text previously misspelled "update" as "uppdate".
        throw new Error("Trying to " + action + " value for field '" + key + "' that does not exist on the context fields definition. Available fields: " + this.fields.map((field) => sanitizeName(field.name)).join(", ") + " ,name, description, external_id");
      }
    });
  }
  /** Schedules a background embedding job on the embedder's BullMQ queue. */
  async #queueEmbeddingJob(user, item) {
    console.log("[EXULU] embedder is in queue mode, scheduling job.");
    return await bullmqDecorator({
      label: `Job running '${this.embedder.name}' for '${item.name} (${item.id}).'`,
      embedder: this.embedder.id,
      type: "embedder",
      inputs: item,
      queue: this.embedder.queue,
      user
    });
  }
  /**
   * Persists freshly generated chunks for `source`, creating the chunks table
   * on first use and, when `replaceExisting`, dropping the old chunks first.
   */
  async #storeChunks(db2, source, chunks, replaceExisting) {
    const exists = await db2.schema.hasTable(this.getChunksTableName());
    if (!exists) {
      await this.createChunksTable();
    }
    if (replaceExisting) {
      await db2.from(this.getChunksTableName()).where({ source }).delete();
    }
    await db2.from(this.getChunksTableName()).insert(chunks.map((chunk) => ({
      source,
      content: chunk.content,
      chunk_index: chunk.index,
      embedding: import_knex4.default.toSql(chunk.vector)
    })));
  }
  /**
   * Updates an existing item and, depending on `calculateVectors`, refreshes
   * its embeddings inline or via a queued job.
   * NOTE: mutates `item` (strips id/created_at/upsert, sets updated_at).
   * NOTE(review): assumes the row exists — `result[0]` would be undefined for
   * an unknown id; confirm callers always pass a valid id.
   * @returns {{id: string, job: (string|undefined)}}
   */
  async updateItem(user, id, item) {
    if (!id) {
      throw new Error("Id is required for updating an item.");
    }
    const { db: db2 } = await postgresClient();
    this.#assertKnownFields(item, "update");
    delete item.id;
    delete item.created_at;
    delete item.upsert;
    item.updated_at = db2.fn.now();
    const result = await db2.from(this.getTableName()).where({ id }).update(item).returning("id");
    if (this.configuration.calculateVectors === "onUpdate" || this.configuration.calculateVectors === "always") {
      if (this.embedder.queue?.name) {
        const job = await this.#queueEmbeddingJob(user, item);
        return {
          id: result[0].id,
          job: job.id
        };
      }
      const { id: source, chunks } = await this.embedder.generateFromDocument({
        ...item,
        id
      }, {
        label: this.name,
        trigger: "agent"
      });
      await this.#storeChunks(db2, source, chunks, true);
    }
    return {
      id: result[0].id,
      job: void 0
    };
  }
  /**
   * Inserts a new item, or upserts by external_id / id when `upsert` is true,
   * and optionally computes its embeddings.
   * NOTE(review): the upsert short-circuits return a bare id string while the
   * insert path returns {id, job} — callers must handle both shapes (behavior
   * preserved from the original to avoid breaking consumers).
   * @returns {{id: string, job: (string|undefined)}|string}
   */
  async insertItem(user, item, upsert = false) {
    if (!item.name) {
      throw new Error("Name field is required.");
    }
    const { db: db2 } = await postgresClient();
    if (item.external_id) {
      const existingItem = await db2.from(this.getTableName()).where({ external_id: item.external_id }).first();
      if (existingItem && !upsert) {
        throw new Error("Item with external id " + item.external_id + " already exists.");
      }
      if (existingItem && upsert) {
        await this.updateItem(user, existingItem.id, item);
        return existingItem.id;
      }
    }
    if (upsert && item.id) {
      const existingItem = await db2.from(this.getTableName()).where({ id: item.id }).first();
      if (existingItem) {
        await this.updateItem(user, existingItem.id, item);
        return existingItem.id;
      }
    }
    this.#assertKnownFields(item, "insert");
    delete item.id;
    delete item.upsert;
    const result = await db2.from(this.getTableName()).insert({
      ...item,
      id: db2.fn.uuid(),
      created_at: db2.fn.now(),
      updated_at: db2.fn.now()
    }).returning("id");
    if (this.configuration.calculateVectors === "onInsert" || this.configuration.calculateVectors === "always") {
      if (this.embedder.queue?.name) {
        const job = await this.#queueEmbeddingJob(user, item);
        return {
          id: result[0].id,
          job: job.id
        };
      }
      console.log("[EXULU] embedder is not in queue mode, calculating vectors directly.");
      const { id: source, chunks } = await this.embedder.generateFromDocument({
        ...item,
        id: result[0].id
      }, {
        label: this.name,
        trigger: "agent"
      });
      await this.#storeChunks(db2, source, chunks, false);
    }
    return {
      id: result[0].id,
      job: void 0
    };
  }
  /**
   * Collects the per-chunk columns (content, index, and whichever distance
   * metric was requested) from one joined result row.
   */
  #chunkMetrics(row, method) {
    return {
      content: row.content,
      chunk_index: row.chunk_index,
      ...method === "l1Distance" && { l1_distance: row.l1_distance },
      ...method === "l2Distance" && { l2_distance: row.l2_distance },
      ...method === "hammingDistance" && { hamming_distance: row.hamming_distance },
      ...method === "jaccardDistance" && { jaccard_distance: row.jaccard_distance },
      ...method === "maxInnerProduct" && { inner_product: row.inner_product },
      ...method === "cosineDistance" && { cosine_distance: row.cosine_distance }
    };
  }
  /**
   * Pages through items. Without `query`, returns a plain paginated listing.
   * With a string `query`, embeds it, ranks the context's chunks by pgvector
   * distance (`method`, default cosine), groups chunks per source item and
   * optionally reranks via `resultReranker`.
   */
  getItems = async ({
    statistics,
    limit,
    page,
    name,
    archived,
    query,
    method
  }) => {
    if (!query && limit > 500) {
      throw new Error("Limit cannot be greater than 500.");
    }
    if (query && limit > 50) {
      throw new Error("Limit cannot be greater than 50 when using a vector search query.");
    }
    if (page < 1) page = 1;
    if (limit < 1) limit = 10;
    const offset = (page - 1) * limit;
    const mainTable = this.getTableName();
    const { db: db2 } = await postgresClient();
    const columns = await db2(mainTable).columnInfo();
    const totalQuery = db2.count("* as count").from(mainTable).first();
    const itemsQuery = db2.select(Object.keys(columns).map((column) => mainTable + "." + column)).from(mainTable).offset(offset).limit(limit);
    if (typeof name === "string") {
      itemsQuery.whereILike("name", `%${name}%`);
      totalQuery.whereILike("name", `%${name}%`);
    }
    if (typeof archived === "boolean") {
      itemsQuery.where("archived", archived);
      totalQuery.where("archived", archived);
    }
    if (!query) {
      const total = await totalQuery;
      const listing = await itemsQuery;
      const last = Math.ceil(total.count / limit);
      return {
        pagination: {
          // FIX: parseInt without a radix replaced by an explicit Number().
          totalCount: Number(total.count),
          currentPage: page,
          limit,
          from: offset,
          pageCount: last || 1,
          to: offset + listing.length,
          lastPage: last || 1,
          nextPage: page + 1 > last ? null : page + 1,
          previousPage: page - 1 || null
        },
        filters: {
          archived,
          name,
          query
        },
        context: {
          name: this.name,
          id: this.id,
          embedder: this.embedder.name
        },
        items: listing
      };
    }
    if (typeof query === "string") {
      // Over-fetch chunk rows: several chunks can belong to one item and are
      // grouped per source below.
      itemsQuery.limit(limit * 5);
      if (statistics) {
        // Fire-and-forget counter; a failed write must not fail the search.
        updateStatistic({
          name: "count",
          label: statistics.label,
          type: STATISTICS_TYPE_ENUM.CONTEXT_RETRIEVE,
          trigger: statistics.trigger
        }).catch((error) => {
          console.error("[EXULU] failed to record context statistic:", error);
        });
      }
      if (this.queryRewriter) {
        query = await this.queryRewriter(query);
      }
      const chunksTable = this.getChunksTableName();
      itemsQuery.leftJoin(chunksTable, function() {
        this.on(chunksTable + ".source", "=", mainTable + ".id");
      });
      // NOTE(review): selecting the chunk columns after the item columns means
      // each row's `id` is the CHUNK id (it shadows the item id) — consumers
      // should identify the item via `source`.
      itemsQuery.select(chunksTable + ".id");
      itemsQuery.select(chunksTable + ".source");
      itemsQuery.select(chunksTable + ".content");
      itemsQuery.select(chunksTable + ".chunk_index");
      itemsQuery.select(chunksTable + ".created_at");
      itemsQuery.select(chunksTable + ".updated_at");
      const { chunks } = await this.embedder.generateFromQuery(query);
      if (!chunks?.[0]?.vector) {
        throw new Error("No vector generated for query.");
      }
      const vector = chunks[0].vector;
      // Vector literal is interpolated rather than bound; the values come from
      // the embedder and are numeric, not attacker-controlled text.
      const vectorStr = `ARRAY[${vector.join(",")}]`;
      const vectorExpr = `${vectorStr}::vector`;
      switch (method) {
        case "l1Distance":
          // FIX: "<->" is pgvector's L2 operator; L1 (taxicab) is "<+>"
          // (requires pgvector >= 0.7.0).
          itemsQuery.select(db2.raw(`?? <+> ${vectorExpr} as l1_distance`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`?? <+> ${vectorExpr} ASC`, [`${chunksTable}.embedding`]));
          break;
        case "l2Distance":
          itemsQuery.select(db2.raw(`?? <-> ${vectorExpr} as l2_distance`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`?? <-> ${vectorExpr} ASC`, [`${chunksTable}.embedding`]));
          break;
        case "hammingDistance":
          // NOTE(review): "<#>" is negative inner product, not Hamming. True
          // Hamming ("<~>") only works on bit columns (pgvector >= 0.7.0),
          // which this schema does not use — left as-is pending a schema fix.
          itemsQuery.select(db2.raw(`?? <#> ${vectorExpr} as hamming_distance`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`?? <#> ${vectorExpr} ASC`, [`${chunksTable}.embedding`]));
          break;
        case "jaccardDistance":
          // NOTE(review): same caveat — Jaccard ("<%>") needs bit columns;
          // "<#>" here actually ranks by inner product.
          itemsQuery.select(db2.raw(`?? <#> ${vectorExpr} as jaccard_distance`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`?? <#> ${vectorExpr} ASC`, [`${chunksTable}.embedding`]));
          break;
        case "maxInnerProduct":
          // "<#>" returns the NEGATIVE inner product, so ASC = largest first.
          itemsQuery.select(db2.raw(`?? <#> ${vectorExpr} as inner_product`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`?? <#> ${vectorExpr} ASC`, [`${chunksTable}.embedding`]));
          break;
        case "cosineDistance":
        default:
          // FIX: cosine uses "<=>"; the original used "<#>" (negative inner
          // product), which only matches cosine ranking for normalized
          // vectors. 1 - cosine_distance = cosine similarity, ordered DESC.
          itemsQuery.select(db2.raw(`1 - (?? <=> ${vectorExpr}) as cosine_distance`, [`${chunksTable}.embedding`]));
          itemsQuery.orderByRaw(db2.raw(`1 - (?? <=> ${vectorExpr}) DESC`, [`${chunksTable}.embedding`]));
          break;
      }
      const rows = await itemsQuery;
      // Group the flat chunk rows back into one entry per source item,
      // preserving the ranked order of first appearance.
      const seenSources = new Map();
      let items = rows.reduce((acc, row) => {
        if (!seenSources.has(row.source)) {
          const base = Object.fromEntries(
            Object.entries(row).filter(
              ([key]) => key !== "l1_distance" && key !== "l2_distance" && key !== "hamming_distance" && key !== "jaccard_distance" && key !== "inner_product" && key !== "cosine_distance" && key !== "content" && key !== "source" && key !== "chunk_index"
            )
          );
          const grouped = {
            ...base,
            chunks: [this.#chunkMetrics(row, method)]
          };
          seenSources.set(row.source, grouped);
          acc.push(grouped);
        } else {
          seenSources.get(row.source).chunks.push(this.#chunkMetrics(row, method));
        }
        return acc;
      }, []);
      if (this.resultReranker && query) {
        items = await this.resultReranker(items);
      }
      return {
        filters: {
          archived,
          name,
          query
        },
        context: {
          name: this.name,
          id: this.id,
          embedder: this.embedder.name
        },
        items
      };
    }
  };
  /**
   * Creates the items table with the built-in columns plus one column per
   * declared field (typed via mapType; fields missing type or name are
   * skipped).
   */
  createItemsTable = async () => {
    const { db: db2 } = await postgresClient();
    const tableName = this.getTableName();
    console.log("[EXULU] Creating table: " + tableName);
    return await db2.schema.createTable(tableName, (table) => {
      console.log("[EXULU] Creating fields for table.", this.fields);
      table.uuid("id").primary().defaultTo(db2.fn.uuid());
      table.string("name", 100);
      table.text("description");
      table.string("tags", 100);
      table.boolean("archived").defaultTo(false);
      table.string("external_id", 100);
      table.integer("textLength");
      table.string("source", 100);
      for (const field of this.fields) {
        const { type, name } = field;
        if (!type || !name) {
          continue;
        }
        mapType(table, type, sanitizeName(name));
      }
      table.timestamps(true, true);
    });
  };
  /**
   * Creates the chunks table: each row references a source item and stores one
   * text chunk with its pgvector embedding sized to the embedder's dimensions.
   */
  createChunksTable = async () => {
    const { db: db2 } = await postgresClient();
    const tableName = this.getChunksTableName();
    console.log("[EXULU] Creating table: " + tableName);
    return await db2.schema.createTable(tableName, (table) => {
      table.uuid("id").primary().defaultTo(db2.fn.uuid());
      table.uuid("source").references("id").inTable(this.getTableName());
      table.text("content");
      table.integer("chunk_index");
      table.specificType("embedding", `vector(${this.embedder.vectorDimensions})`);
      table.timestamps(true, true);
    });
  };
  // Exports the context as a tool that can be used by an agent
  tool = () => {
    return new ExuluTool({
      id: this.id,
      name: `${this.name} context`,
      type: "context",
      inputSchema: import_zod2.z.object({
        query: import_zod2.z.string()
      }),
      outputSchema: import_zod2.z.object({
        // todo check if result format is still correct based on above getItems function
        results: import_zod2.z.array(import_zod2.z.object({
          count: import_zod2.z.number(),
          results: import_zod2.z.array(import_zod2.z.object({
            id: import_zod2.z.string(),
            content: import_zod2.z.string(),
            metadata: import_zod2.z.record(import_zod2.z.any())
          })),
          errors: import_zod2.z.array(import_zod2.z.string()).optional()
        }))
      }),
      description: `Gets information from the context called: ${this.name}. The context description is: ${this.description}.`,
      execute: async ({ context }) => {
        // Agents always get the top 10 vector-search results for their query.
        return await this.getItems({
          page: 1,
          limit: 10,
          query: context.query,
          statistics: {
            label: this.name,
            trigger: "agent"
          }
        });
      }
    });
  };
  sources = {
    // Registers a new source on this context and returns it.
    add: (inputs) => {
      const source = new ExuluSource({
        ...inputs,
        context: this.id
      });
      this._sources.push(source);
      return source;
    },
    // With an id: that source (or undefined). Without an id: all sources.
    get: (id) => {
      if (id) {
        return this._sources.find((source) => source.id === id);
      }
      return this._sources ?? [];
    }
  };
};
|
|
1342
|
+
var ExuluSource = class {
  id;
  name;
  description;
  updaters;
  context;
  /**
   * A data source attached to a context. Webhook-type updaters are given a
   * routable slug derived from the owning context, source and updater ids;
   * all other updaters are kept untouched.
   */
  constructor({ id, name, description, updaters, context }) {
    this.id = id;
    this.name = name;
    this.description = description;
    this.context = context;
    this.updaters = updaters.map((entry) =>
      entry.type === "webhook"
        ? { ...entry, slug: `/contexts/${context}/sources/${this.id}/${entry.id}/webhook` }
        : entry
    );
  }
};
|
|
1364
|
+
// Increments the running total (and today's timeseries bucket) for one
// statistic row identified by (name, label, type).
//
// NOTE(review): knex documents onConflict()/merge() for INSERT builders only;
// chaining them onto an UPDATE here looks suspect — if the row does not exist
// this cannot create it, and some knex versions reject onConflict on a
// non-insert query outright. Verify against the statistics schema and the
// knex version in use.
var updateStatistic = async (statistic) => {
  // "YYYY-MM-DD" bucket used to match today's timeseries entry.
  const currentDate = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const { db: db2 } = await postgresClient();
  await db2.from("statistics").update({
    // statistic.count defaults to 1 when not provided.
    total: db2.raw("total + ?", [statistic.count ?? 1]),
    // Only append to the timeseries when the stored row's date is today;
    // otherwise the existing array is kept as-is.
    timeseries: db2.raw("CASE WHEN date = ? THEN array_append(timeseries, ?) ELSE timeseries END", [currentDate, { date: currentDate, count: statistic.count ?? 1 }])
  }).where({
    name: statistic.name,
    label: statistic.label,
    type: statistic.type
  }).onConflict("name").merge();
};
|
|
1376
|
+
|
|
1377
|
+
// src/registry/index.ts
|
|
1378
|
+
var import_express6 = require("express");
|
|
1379
|
+
|
|
1380
|
+
// src/registry/routes.ts
|
|
1381
|
+
var import_express3 = require("express");
|
|
1382
|
+
|
|
1383
|
+
// src/registry/rate-limiter.ts
|
|
1384
|
+
/**
 * Fixed-window rate limiter backed by Redis.
 *
 * Each call adds `points` to the counter for `key`; the first hit in a window
 * arms a TTL of `windowSeconds`. Once the counter exceeds `limit`, callers are
 * denied with the remaining window as `retryAfter`.
 *
 * Fails CLOSED when no Redis client is configured (denies with a 10s hint).
 */
var rateLimiter = async (key, windowSeconds, limit, points) => {
  if (!redisClient) {
    return {
      status: false,
      retryAfter: 10
      // 10 seconds
    };
  }
  const redisKey = `exulu/${key}`;
  const current = await redisClient.incrBy(redisKey, points);
  if (current === points) {
    // First hit of this window: start the expiry clock.
    await redisClient.expire(redisKey, windowSeconds);
  } else {
    // FIX: if the EXPIRE above ever failed (e.g. the process died between
    // INCRBY and EXPIRE), the key had no TTL and would throttle this key
    // forever. Re-arm the window when the key has no expiry (ttl === -1).
    const existingTtl = await redisClient.ttl(redisKey);
    if (existingTtl === -1) {
      await redisClient.expire(redisKey, windowSeconds);
    }
  }
  if (current > limit) {
    const ttl = await redisClient.ttl(redisKey);
    return {
      status: false,
      retryAfter: ttl
    };
  }
  return {
    status: true,
    retryAfter: null
  };
};
|
|
1409
|
+
|
|
1410
|
+
// src/registry/route-validators/index.ts
|
|
1411
|
+
var import_express = require("express");
|
|
1412
|
+
var import_jwt = require("next-auth/jwt");
|
|
1413
|
+
|
|
1414
|
+
// src/auth/auth.ts
|
|
1415
|
+
var import_bcryptjs = __toESM(require("bcryptjs"), 1);
|
|
1416
|
+
// Resolves a caller identity from one of three credentials, tried in order:
//   1. `internalkey` — must equal the INTERNAL_SECRET env var; yields a
//      synthetic internal API user.
//   2. `authtoken`   — a decoded next-auth session token; the user is looked
//      up by its email in the users table.
//   3. `apikey`      — "{key}/{name}" format; bcrypt-compared against every
//      stored API user's hashed key.
// Returns { error, code, user?, message? }; any failure is a 401.
var authentication = async ({
  apikey,
  authtoken,
  internalkey,
  db: db2
}) => {
  if (internalkey) {
    if (!process.env.INTERNAL_SECRET) {
      return {
        error: true,
        message: `Header "internal" provided, but no INTERNAL_SECRET was provided in the environment variables.`,
        code: 401
      };
    }
    // NOTE(review): plain === comparison of secrets is not constant-time;
    // consider crypto.timingSafeEqual if this endpoint is externally reachable.
    if (process.env.INTERNAL_SECRET !== internalkey) {
      return {
        error: true,
        message: `Internal key was provided in header but did not match the INTERNAL_SECRET environment variable.`,
        code: 401
      };
    }
    // Internal callers act as a fixed synthetic API user.
    return {
      error: false,
      code: 200,
      user: {
        type: "api",
        id: "XXXX-XXXX-XXXX-XXXX",
        email: "internal@exulu.com"
      }
    };
  }
  if (authtoken) {
    try {
      // NOTE(review): this logs the full session token — a credential leak
      // into logs; consider removing or redacting.
      console.log("authtoken", authtoken);
      if (!authtoken?.email) {
        return {
          error: true,
          message: `No email provided in session ${JSON.stringify(authtoken)}`,
          code: 401
        };
      }
      const user = await db2.from("users").select("*").where("email", authtoken?.email).first();
      console.log("user", user);
      if (!user) {
        return {
          error: true,
          message: `No user found for email: ${authtoken.email}`,
          code: 401
        };
      }
      return {
        error: false,
        code: 200,
        user
      };
    } catch (error) {
      console.error(error);
      return {
        error: true,
        message: "Invalid token.",
        code: 401
      };
    }
  }
  if (apikey) {
    const users = await db2.from("users").select("*").where("type", "api");
    if (!users || users.length === 0) {
      return {
        error: true,
        message: `No API users found.`,
        code: 401
      };
    }
    // Expected key shape: "{key}/{name}" — last segment is the key's name,
    // the first segment is the secret value.
    // NOTE(review): keyValue is only keyParts[0]; a raw key containing "/"
    // would be truncated — confirm generated keys can never contain a slash.
    const keyParts = apikey.split("/");
    const keyName = keyParts.pop();
    const keyValue = keyParts[0];
    if (!keyName) {
      return {
        error: true,
        message: "Provided api key does not include postfix with key name ({key}/{name}).",
        code: 401
      };
    }
    if (!keyValue) {
      return {
        error: true,
        message: "Provided api key is not in the correct format.",
        code: 401
      };
    }
    // Narrow candidates by name substring first, then bcrypt-compare the
    // secret against the hash portion (everything before the last "/").
    const filtered = users.filter(({ apiKey, id }) => apiKey.includes(keyName));
    for (const user of filtered) {
      const lastSlashIndex = user.apiKey.lastIndexOf("/");
      const compareValue = lastSlashIndex !== -1 ? user.apiKey.substring(0, lastSlashIndex) : user.apiKey;
      const isMatch = await import_bcryptjs.default.compare(keyValue, compareValue);
      if (isMatch) {
        // Record usage; the update result is intentionally unused.
        await db2.from("users").where({ id: user.id }).update({
          lastUsed: /* @__PURE__ */ new Date()
        }).returning("id");
        return {
          error: false,
          code: 200,
          user
        };
      }
    }
  }
  // Also reached when an apikey was provided but matched no user.
  return {
    error: true,
    message: "Either an api key or authorization key must be provided.",
    code: 401
  };
};
|
|
1529
|
+
|
|
1530
|
+
// src/registry/route-validators/index.ts
|
|
1531
|
+
var requestValidators = {
  /**
   * Resolves the calling user from either an "exulu-api-key" header or (when
   * absent) a next-auth session token decoded from the request.
   */
  authenticate: async (req) => {
    const apikey = req.headers["exulu-api-key"] || null;
    const { db: db2 } = await postgresClient();
    let authtoken = null;
    if (typeof apikey !== "string") {
      const secret = process.env.NEXTAUTH_SECRET;
      authtoken = await (0, import_jwt.getToken)({ req, secret });
    }
    return await authentication({
      authtoken,
      apikey,
      db: db2
    });
  },
  /**
   * Validates a workflow-run request: JSON content type plus agent, session,
   * inputs and label in the body. Returns { error } or a 400 payload.
   */
  workflows: (req) => {
    const contentType = req.headers["content-type"] || "";
    if (!contentType.includes("application/json")) {
      return {
        error: true,
        code: 400,
        message: "Unsupported content type."
      };
    }
    if (!req.body) {
      return {
        error: true,
        code: 400,
        message: "Missing body."
      };
    }
    const required = [
      ["agent", "Missing agent in body."],
      ["session", "Missing session in body."],
      ["inputs", "Missing inputs in body."],
      ["label", "Missing label for job in body."]
    ];
    for (const [key, message] of required) {
      if (!req.body[key]) {
        return {
          error: true,
          code: 400,
          message
        };
      }
    }
    return {
      error: false
    };
  },
  /**
   * Validates an embedder request: JSON content type, inputs and label in the
   * body, plus — when the embedder declares a `configuration` schema — every
   * configuration key present in body.configuration.
   */
  embedders: (req, configuration) => {
    const contentType = req.headers["content-type"] || "";
    if (!contentType.includes("application/json")) {
      return {
        error: true,
        code: 400,
        message: "Unsupported content type."
      };
    }
    if (!req.body) {
      return {
        error: true,
        code: 400,
        message: "Missing body."
      };
    }
    if (!req.body.inputs) {
      return {
        error: true,
        code: 400,
        message: "Missing inputs."
      };
    }
    if (!req.body.label) {
      return {
        error: true,
        code: 400,
        message: "Missing label for job in body."
      };
    }
    if (configuration) {
      for (const key in configuration) {
        // FIX: previously read req.body.configuration[key] unguarded, which
        // threw a TypeError when body.configuration was absent, and the
        // truthiness test also rejected legitimate falsy values (0, false, "").
        if (req.body.configuration?.[key] == null) {
          return {
            error: true,
            code: 400,
            message: `Missing ${key} in body.configuration.`
          };
        }
      }
    }
    return {
      error: false
    };
  },
  /**
   * Validates an agent chat request: JSON content type plus threadId,
   * resourceId and messages in the body.
   */
  agents: (req) => {
    console.log("[EXULU] validating request body and headers.", req.body);
    const contentType = req.headers["content-type"] || "";
    if (!contentType.includes("application/json")) {
      return {
        error: true,
        code: 400,
        message: "Unsupported content type."
      };
    }
    if (!req.body) {
      return {
        error: true,
        code: 400,
        message: "Missing body."
      };
    }
    const required = [
      ["threadId", "Missing threadId in body."],
      ["resourceId", "Missing resourceId in body."],
      ["messages", 'Missing "messages" property in body.']
    ];
    for (const [key, message] of required) {
      if (!req.body[key]) {
        return {
          error: true,
          code: 400,
          message
        };
      }
    }
    return {
      error: false
    };
  }
};
|
|
1682
|
+
|
|
1683
|
+
// src/registry/routes.ts
|
|
1684
|
+
var import_zodex = require("zodex");
|
|
1685
|
+
|
|
1686
|
+
// src/bullmq/queues.ts
|
|
1687
|
+
var import_bullmq5 = require("bullmq");
|
|
1688
|
+
var ExuluQueues = class {
  queues;
  // Lazily-populated registry of BullMQ queues, keyed by name.
  constructor() {
    this.queues = [];
  }
  // Looks up an already-registered queue; undefined when absent.
  queue(name) {
    return this.queues.find((entry) => entry.name === name);
  }
  // Returns the queue registered under `name`, creating (and caching) a new
  // BullMQ queue on the shared Redis connection on first use.
  use(name) {
    const found = this.queue(name);
    if (found) {
      return found;
    }
    const created = new import_bullmq5.Queue(`${name}`, { connection: redisServer });
    this.queues.push(created);
    return created;
  }
};
var queues = new ExuluQueues();
|
|
1707
|
+
|
|
1708
|
+
// types/models/vector-methods.ts
|
|
1709
|
+
// Enum of supported pgvector distance/similarity operators; each key maps to
// itself so the value can be used directly as a method name.
var VectorMethodEnum = Object.fromEntries(
  [
    "cosineDistance",
    "l1Distance",
    "l2Distance",
    "hammingDistance",
    "jaccardDistance",
    "maxInnerProduct"
  ].map((method) => [method, method])
);
|
|
1717
|
+
|
|
1718
|
+
// src/registry/routes.ts
|
|
1719
|
+
var import_express4 = __toESM(require("express"), 1);
|
|
1720
|
+
var import_server3 = require("@apollo/server");
|
|
1721
|
+
var import_cors = __toESM(require("cors"), 1);
|
|
1722
|
+
var import_reflect_metadata = require("reflect-metadata");
|
|
1723
|
+
|
|
1724
|
+
// src/registry/utils/graphql.ts
|
|
1725
|
+
var import_schema = require("@graphql-tools/schema");
|
|
1726
|
+
var import_graphql_type_json = __toESM(require("graphql-type-json"), 1);
|
|
1727
|
+
// Maps a registry field definition to its GraphQL scalar type name.
// Text-like fields, dates and unknown types all map to String.
var map = (field) => {
  const graphqlTypeByFieldType = {
    text: "String",
    shortText: "String",
    longText: "String",
    code: "String",
    number: "Float",
    boolean: "Boolean",
    json: "JSON",
    date: "String"
  };
  return graphqlTypeByFieldType[field.type] ?? "String";
};
|
|
1753
|
+
// Builds the GraphQL SDL for one registry table: an object type named after
// table.name.singular (fields from table.fields plus id/createdAt/updatedAt)
// and a matching `<singular>Input` input type for mutations.
// NOTE(review): the object type uses table.name.singular verbatim (no case
// normalization) — confirm callers reference it with the same casing.
function createTypeDefs(table) {
  const fields = table.fields.map((field) => {
    let type;
    type = map(field);
    // Append "!" for non-nullable fields.
    const required = field.required ? "!" : "";
    return `  ${field.name}: ${type}${required}`;
  });
  const typeDef = `
  type ${table.name.singular} {
  ${fields.join("\n")}
    id: ID!
    createdAt: String!
    updatedAt: String!
  }
  `;
  // Input type deliberately drops the "!" markers so partial updates are valid.
  const inputDef = `
  input ${table.name.singular}Input {
  ${table.fields.map((f) => `  ${f.name}: ${map(f)}`).join("\n")}
  }
  `;
  return typeDef + inputDef;
}
|
|
1775
|
+
// Builds the SDL for per-table filtering: shared FilterOperator* input types,
// SortBy/SortDirection, and a Filter<TableName> input with one operator field
// per table column.
// NOTE(review): the shared types (FilterOperatorString, SortBy, ...) are
// emitted once per table; with more than one table the concatenated SDL
// would define them repeatedly — confirm makeExecutableSchema tolerates
// duplicate type definitions, otherwise these should be emitted once globally.
function createFilterTypeDefs(table) {
  const fieldFilters = table.fields.map((field) => {
    let type;
    type = map(field);
    return `
    ${field.name}: FilterOperator${type}`;
  });
  // Capitalized singular name used in the Filter<Name> input type.
  const tableNameSingularUpperCaseFirst = table.name.singular.charAt(0).toUpperCase() + table.name.singular.slice(1);
  const operatorTypes = `
  input FilterOperatorString {
    eq: String
    ne: String
    in: [String]
    contains: String
  }

  input FilterOperatorFloat {
    eq: Float
    ne: Float
    in: [Float]
  }

  input FilterOperatorBoolean {
    eq: Boolean
    ne: Boolean
    in: [Boolean]
  }

  input FilterOperatorJSON {
    eq: JSON
    ne: JSON
    in: [JSON]
  }

  input SortBy {
    field: String!
    direction: SortDirection!
  }

  enum SortDirection {
    ASC
    DESC
  }

  input Filter${tableNameSingularUpperCaseFirst} {
    ${fieldFilters.join("\n")}
  }`;
  return operatorTypes;
}
|
|
1824
|
+
// Extracts the list of requested field names from a GraphQL resolve info
// object. For paginated queries it reads the sub-selection under "items";
// otherwise it reads the top-level selection. "pageInfo"/"items" wrappers are
// excluded; duplicates are removed (first occurrence order preserved).
var getRequestedFields = (info) => {
  const selections = info.operation.selectionSet.selections[0].selectionSet.selections;
  const itemsSelection = selections.find((s) => s.name.value === "items");
  const source = itemsSelection ? itemsSelection.selectionSet.selections : selections;
  const uniqueNames = [...new Set(source.map((selection) => selection.name.value))];
  return uniqueNames.filter((name) => name !== "pageInfo" && name !== "items");
};
|
|
1836
|
+
/**
 * Builds the GraphQL Mutation resolvers for one registry table.
 *
 * Resolver names are derived from the lowercased plural table name:
 * `<plural>CreateOne`, `<plural>UpdateOne`, `<plural>UpdateOneById`,
 * `<plural>RemoveOne`, `<plural>RemoveOneById`. Every resolver expects a
 * knex-style `db` instance on the GraphQL context and only selects/returns
 * the fields actually requested by the operation.
 *
 * Improvements over the previous version: removed the unused
 * `tableNameSingular` local and a leftover debug `console.log`.
 *
 * @param {object} table registry table definition (`name.plural` is used)
 * @returns {object} map of resolver name -> async resolver function
 */
function createMutations(table) {
  const tableNamePlural = table.name.plural.toLowerCase();
  return {
    // Insert a row, stamping createdAt/updatedAt, and return requested fields.
    [`${tableNamePlural}CreateOne`]: async (_, args, context, info) => {
      const { db: db2 } = context;
      const requestedFields = getRequestedFields(info);
      const results = await db2(tableNamePlural).insert({
        ...args.input,
        createdAt: /* @__PURE__ */ new Date(),
        updatedAt: /* @__PURE__ */ new Date()
      }).returning(requestedFields);
      return results[0];
    },
    // Update rows matching an arbitrary `where` object, then re-read the
    // first match so the response reflects the stored values.
    [`${tableNamePlural}UpdateOne`]: async (_, args, context, info) => {
      const { db: db2 } = context;
      const { where, input } = args;
      await db2(tableNamePlural).where(where).update({
        ...input,
        updatedAt: /* @__PURE__ */ new Date()
      });
      const requestedFields = getRequestedFields(info);
      const result = await db2.from(tableNamePlural).select(requestedFields).where(where).first();
      return result;
    },
    // Same as UpdateOne but addressed by primary key.
    [`${tableNamePlural}UpdateOneById`]: async (_, args, context, info) => {
      const { id, input } = args;
      const { db: db2 } = context;
      await db2(tableNamePlural).where({ id }).update({
        ...input,
        updatedAt: /* @__PURE__ */ new Date()
      });
      const requestedFields = getRequestedFields(info);
      const result = await db2.from(tableNamePlural).select(requestedFields).where({ id }).first();
      return result;
    },
    // Read the row first (so it can be returned), then delete it.
    [`${tableNamePlural}RemoveOne`]: async (_, args, context, info) => {
      const { db: db2 } = context;
      const { where } = args;
      const requestedFields = getRequestedFields(info);
      const result = await db2.from(tableNamePlural).select(requestedFields).where(where).first();
      await db2(tableNamePlural).where(where).del();
      return result;
    },
    // Same as RemoveOne but addressed by primary key.
    [`${tableNamePlural}RemoveOneById`]: async (_, args, context, info) => {
      const { id } = args;
      const { db: db2 } = context;
      const requestedFields = getRequestedFields(info);
      const result = await db2.from(tableNamePlural).select(requestedFields).where({ id }).first();
      await db2(tableNamePlural).where({ id }).del();
      return result;
    }
  };
}
|
|
1891
|
+
// Builds the GraphQL Query resolvers for one registry table:
// `<singular>ById`, `<singular>One` and `<plural>Pagination`. All resolvers
// expect a knex-style `db` on the GraphQL context and select only the
// requested fields.
function createQueries(table) {
  const tableNamePlural = table.name.plural.toLowerCase();
  const tableNameSingular = table.name.singular.toLowerCase();
  // Applies an array of { field: { eq/ne/in/contains } } filters to a knex
  // query. Multiple filters and multiple operators are ANDed together.
  const applyFilters = (query, filters) => {
    filters.forEach((filter) => {
      Object.entries(filter).forEach(([fieldName, operators]) => {
        if (operators) {
          if (operators.eq !== void 0) {
            query = query.where(fieldName, operators.eq);
          }
          if (operators.ne !== void 0) {
            // IS DISTINCT FROM treats NULL as a comparable value
            // (plain `!=` would drop NULL rows). Postgres-specific syntax.
            query = query.whereRaw(`?? IS DISTINCT FROM ?`, [fieldName, operators.ne]);
          }
          if (operators.in !== void 0) {
            query = query.whereIn(fieldName, operators.in);
          }
          if (operators.contains !== void 0) {
            // Substring match; `contains` is interpolated into the LIKE
            // pattern, so % and _ in user input act as wildcards.
            query = query.where(fieldName, "like", `%${operators.contains}%`);
          }
        }
      });
    });
    return query;
  };
  // Applies an optional { field, direction } sort (direction is ASC/DESC).
  const applySorting = (query, sort) => {
    if (sort) {
      query = query.orderBy(sort.field, sort.direction.toLowerCase());
    }
    return query;
  };
  return {
    // Fetch a single row by primary key.
    [`${tableNameSingular}ById`]: async (_, args, context, info) => {
      const { db: db2 } = context;
      const requestedFields = getRequestedFields(info);
      const result = await db2.from(tableNamePlural).select(requestedFields).where({ id: args.id }).first();
      return result;
    },
    // Fetch the first row matching the filters (after optional sorting).
    [`${tableNameSingular}One`]: async (_, args, context, info) => {
      const { filters = [], sort } = args;
      const { db: db2 } = context;
      const requestedFields = getRequestedFields(info);
      let query = db2.from(tableNamePlural).select(requestedFields);
      query = applyFilters(query, filters);
      query = applySorting(query, sort);
      const result = await query.first();
      return result;
    },
    // Paginated fetch: returns { pageInfo, items }.
    // NOTE(review): the page math is inconsistent — `page` defaults to 0,
    // but the offset is only applied when page > 1 (so page 0 and page 1
    // both return the first page), hasPreviousPage uses `> 1` while
    // hasNextPage uses `< pageCount - 1`. Looks like a mix of 0-based and
    // 1-based paging; confirm intended convention with callers.
    [`${tableNamePlural}Pagination`]: async (_, args, context, info) => {
      const { limit = 10, page = 0, filters = [], sort } = args;
      const { db: db2 } = context;
      console.log("page", page);
      let baseQuery = db2(tableNamePlural);
      baseQuery = applyFilters(baseQuery, filters);
      // Count total matches before applying limit/offset.
      const [{ count }] = await baseQuery.clone().count("* as count");
      const itemCount = Number(count);
      const pageCount = Math.ceil(itemCount / limit);
      const currentPage = page;
      const hasPreviousPage = currentPage > 1;
      const hasNextPage = currentPage < pageCount - 1;
      let dataQuery = baseQuery.clone();
      const requestedFields = getRequestedFields(info);
      dataQuery = applySorting(dataQuery, sort);
      if (page > 1) {
        dataQuery = dataQuery.offset((page - 1) * limit);
      }
      const items = await dataQuery.select(requestedFields).limit(limit);
      // Debug logging left in from development.
      console.log("items", items);
      console.log("query", dataQuery.toQuery());
      return {
        pageInfo: {
          pageCount,
          itemCount,
          currentPage,
          hasPreviousPage,
          hasNextPage
        },
        items
      };
    }
  };
}
|
|
1972
|
+
// Assembles the full executable GraphQL schema for all registry tables:
// Query/Mutation operation signatures, per-table object/input/filter types,
// pagination result types, and the matching resolver maps. Logs a summary
// of operations and fields to the console before returning the schema.
function createSDL(tables) {
  let typeDefs = `
  scalar JSON

  type Query {
  `;
  let mutationDefs = `
  type Mutation {
  `;
  let modelDefs = "";
  // JSON scalar resolver comes from graphql-type-json.
  const resolvers = { JSON: import_graphql_type_json.default, Query: {}, Mutation: {} };
  for (const table of tables) {
    const tableNamePlural = table.name.plural.toLowerCase();
    const tableNameSingular = table.name.singular.toLowerCase();
    const tableNameSingularUpperCaseFirst = table.name.singular.charAt(0).toUpperCase() + table.name.singular.slice(1);
    // NOTE(review): operation signatures reference the lowercased
    // `${tableNameSingular}` type, while createTypeDefs defines
    // `type ${table.name.singular}` with its original casing — these only
    // match when table.name.singular is already lowercase. Confirm.
    typeDefs += `
    ${tableNameSingular}ById(id: ID!): ${tableNameSingular}
    ${tableNamePlural}Pagination(limit: Int, page: Int, filters: [Filter${tableNameSingularUpperCaseFirst}], sort: SortBy): ${tableNameSingularUpperCaseFirst}PaginationResult
    ${tableNameSingular}One(filters: [Filter${tableNameSingularUpperCaseFirst}], sort: SortBy): ${tableNameSingular}
    `;
    mutationDefs += `
    ${tableNamePlural}CreateOne(input: ${tableNameSingular}Input!): ${tableNameSingular}
    ${tableNamePlural}UpdateOne(where: JSON!, input: ${tableNameSingular}Input!): ${tableNameSingular}
    ${tableNamePlural}UpdateOneById(id: ID!, input: ${tableNameSingular}Input!): ${tableNameSingular}
    ${tableNamePlural}RemoveOne(where: JSON!): ${tableNameSingular}
    ${tableNamePlural}RemoveOneById(id: ID!): ${tableNameSingular}
    `;
    // Per-table object/input types, filter types, and pagination wrapper.
    modelDefs += createTypeDefs(table);
    modelDefs += createFilterTypeDefs(table);
    modelDefs += `
    type ${tableNameSingularUpperCaseFirst}PaginationResult {
      pageInfo: PageInfo!
      items: [${tableNameSingular}]!
    }

    type PageInfo {
      pageCount: Int!
      itemCount: Int!
      currentPage: Int!
      hasPreviousPage: Boolean!
      hasNextPage: Boolean!
    }
    `;
    Object.assign(resolvers.Query, createQueries(table));
    Object.assign(resolvers.Mutation, createMutations(table));
  }
  typeDefs += "}\n";
  mutationDefs += "}\n";
  const fullSDL = typeDefs + mutationDefs + modelDefs;
  const schema = (0, import_schema.makeExecutableSchema)({
    typeDefs: fullSDL,
    resolvers
  });
  // Console summary of the generated schema (operations + fields).
  console.log("\n\u{1F4CA} GraphQL Schema Overview\n");
  const queriesTable = Object.keys(resolvers.Query).map((query) => ({
    "Operation Type": "Query",
    "Name": query,
    "Description": "Retrieves data"
  }));
  const mutationsTable = Object.keys(resolvers.Mutation).map((mutation) => ({
    "Operation Type": "Mutation",
    "Name": mutation,
    "Description": "Modifies data"
  }));
  const typesTable = tables.flatMap(
    (table) => table.fields.map((field) => ({
      "Type": table.name.singular,
      "Field": field.name,
      "Field Type": field.type,
      "Required": field.required ? "Yes" : "No"
    }))
  );
  console.log("\u{1F50D} Operations:");
  console.table([...queriesTable, ...mutationsTable]);
  console.log("\n\u{1F4DD} Types and Fields:");
  console.table(typesTable);
  console.log("\n");
  return schema;
}
|
|
2051
|
+
|
|
2052
|
+
// src/registry/routes.ts
|
|
2053
|
+
var import_express5 = require("@as-integrations/express5");
|
|
2054
|
+
|
|
2055
|
+
// src/registry/uppy.ts
|
|
2056
|
+
var import_express2 = require("express");
|
|
2057
|
+
var import_jwt2 = require("next-auth/jwt");
|
|
2058
|
+
var bodyParser = require("body-parser");
|
|
2059
|
+
// Registers the Uppy/S3 upload routes on an Express app: presigned PUT
// uploads, multipart uploads (create/sign-part/list-parts/complete/abort),
// listing and downloading objects, and STS federation tokens. Requests are
// authenticated via an `exulu-api-key` header or a next-auth JWT.
// Returns the same app instance.
var createUppyRoutes = async (app) => {
  const {
    S3Client,
    AbortMultipartUploadCommand,
    CompleteMultipartUploadCommand,
    CreateMultipartUploadCommand,
    GetObjectCommand,
    ListPartsCommand,
    PutObjectCommand,
    UploadPartCommand,
    ListObjectsV2Command
  } = require("@aws-sdk/client-s3");
  const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
  const {
    STSClient,
    GetFederationTokenCommand
  } = require("@aws-sdk/client-sts");
  // IAM policy attached to federation tokens: PutObject on the upload bucket only.
  const policy = {
    Version: "2012-10-17",
    Statement: [
      {
        Effect: "Allow",
        Action: [
          "s3:PutObject"
        ],
        Resource: [
          `arn:aws:s3:::${process.env.COMPANION_S3_BUCKET}/*`,
          `arn:aws:s3:::${process.env.COMPANION_S3_BUCKET}`
        ]
      }
    ]
  };
  // Lazily-created, memoized SDK clients.
  let s3Client;
  let stsClient;
  // Lifetime (seconds) for presigned URLs and STS tokens: 24 hours.
  const expiresIn = 60 * 60 * 24 * 1;
  function getS3Client() {
    s3Client ??= new S3Client({
      region: process.env.COMPANION_S3_REGION,
      // Custom endpoint (e.g. MinIO) needs path-style addressing.
      ...process.env.COMPANION_S3_ENDPOINT && {
        forcePathStyle: true,
        endpoint: process.env.COMPANION_S3_ENDPOINT
      },
      credentials: {
        accessKeyId: process.env.COMPANION_S3_KEY,
        secretAccessKey: process.env.COMPANION_S3_SECRET
      }
    });
    return s3Client;
  }
  function getSTSClient() {
    stsClient ??= new STSClient({
      region: process.env.COMPANION_S3_REGION,
      ...process.env.COMPANION_S3_ENDPOINT && { endpoint: process.env.COMPANION_S3_ENDPOINT },
      credentials: {
        accessKeyId: process.env.COMPANION_S3_KEY,
        secretAccessKey: process.env.COMPANION_S3_SECRET
      }
    });
    return stsClient;
  }
  app.use(bodyParser.urlencoded({ extended: true }), bodyParser.json());
  app.get("/", (req, res) => {
    res.json("Exulu upload server.");
  });
  // List objects under a prefix. Non-API users may only list prefixes that
  // contain their own user id.
  app.get("/s3/list", async (req, res, next) => {
    const apikey = req.headers["exulu-api-key"] || null;
    let authtoken = null;
    if (typeof apikey !== "string") {
      const secret = process.env.NEXTAUTH_SECRET;
      authtoken = await (0, import_jwt2.getToken)({ req, secret });
    }
    const { db: db2 } = await postgresClient();
    const authenticationResult = await authentication({
      authtoken,
      apikey,
      db: db2
    });
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const { prefix = "" } = req.query;
    if (typeof prefix !== "string") {
      res.status(400).json({ error: "Invalid prefix parameter. Must be a string." });
      return;
    }
    if (authenticationResult.user.type !== "api" && !prefix.includes(authenticationResult.user.id)) {
      res.status(405).json({ error: "Not allowed to list files in this folder based on authenticated user." });
      return;
    }
    try {
      const command = new ListObjectsV2Command({
        Bucket: process.env.COMPANION_S3_BUCKET,
        Prefix: prefix,
        MaxKeys: 1e3
        // Adjust this value based on your needs
      });
      const data = await getS3Client().send(command);
      const files = data.Contents?.map((item) => ({
        key: item.Key,
        size: item.Size,
        lastModified: item.LastModified
      })) || [];
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.status(200).json({
        files,
        isTruncated: data.IsTruncated,
        nextContinuationToken: data.NextContinuationToken
      });
    } catch (err) {
      next(err);
    }
  });
  // Presigned GET URL for downloading a single object. Also accepts an
  // `internal-key` header for service-to-service calls.
  app.get("/s3/download", async (req, res, next) => {
    const apikey = req.headers["exulu-api-key"] || null;
    const internalkey = req.headers["internal-key"] || null;
    const { db: db2 } = await postgresClient();
    let authtoken = null;
    if (typeof apikey !== "string" && typeof internalkey !== "string") {
      const secret = process.env.NEXTAUTH_SECRET;
      authtoken = await (0, import_jwt2.getToken)({ req, secret });
    }
    const authenticationResult = await authentication({
      authtoken,
      apikey,
      internalkey,
      db: db2
    });
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const { key } = req.query;
    if (typeof key !== "string" || key.trim() === "") {
      res.status(400).json({ error: "Missing or invalid `key` query parameter." });
      return;
    }
    if (authenticationResult.user.type !== "api" && !key.includes(authenticationResult.user.id)) {
      res.status(405).json({ error: "Not allowed to access the files in the folder based on authenticated user." });
      return;
    }
    try {
      const url = await getSignedUrl(
        getS3Client(),
        new GetObjectCommand({
          Bucket: process.env.COMPANION_S3_BUCKET,
          Key: key
        }),
        { expiresIn }
      );
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.json({ url, method: "GET", expiresIn });
    } catch (err) {
      next(err);
    }
  });
  // Temporary STS federation credentials scoped by `policy` above.
  // NOTE(review): unlike the other routes this one performs no
  // authentication — confirm that is intentional.
  app.get("/s3/sts", (req, res, next) => {
    getSTSClient().send(new GetFederationTokenCommand({
      Name: "Exulu",
      // The duration, in seconds, of the role session. The value specified
      // can range from 900 seconds (15 minutes) up to the maximum session
      // duration set for the role.
      DurationSeconds: expiresIn,
      Policy: JSON.stringify(policy)
    })).then((response) => {
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.setHeader("Cache-Control", `public,max-age=${expiresIn}`);
      res.json({
        credentials: response.Credentials,
        bucket: process.env.COMPANION_S3_BUCKET,
        region: process.env.COMPANION_S3_REGION
      });
    }, next);
  });
  // Throws when either parameter is falsy; surfaced via Express error handling.
  const validateFileParameters = (filename, contentType) => {
    if (!filename || !contentType) {
      throw new Error("Missing required parameters: filename and content type are required");
    }
  };
  // POST requests carry params in the body, GET requests in the query string.
  const extractFileParameters = (req) => {
    const isPostRequest = req.method === "POST";
    const params = isPostRequest ? req.body : req.query;
    return {
      filename: params.filename,
      contentType: params.type
    };
  };
  // NOTE(review): "$(unknown)" looks like a garbled interpolation — this was
  // presumably `${filename}` (a UUID-prefixed object key); the parameter is
  // otherwise unused. Confirm against the package source.
  const generateS3Key = (filename) => `${crypto.randomUUID()}-$(unknown)`;
  // Shared handler for /s3/params (GET) and /s3/sign (POST): authenticates,
  // derives a per-user (or api/) folder, and returns a presigned PUT URL.
  const signOnServer = async (req, res, next) => {
    const apikey = req.headers["exulu-api-key"] || null;
    const { db: db2 } = await postgresClient();
    let authtoken = null;
    if (typeof apikey !== "string") {
      const secret = process.env.NEXTAUTH_SECRET;
      authtoken = await (0, import_jwt2.getToken)({ req, secret });
    }
    const authenticationResult = await authentication({
      authtoken,
      apikey,
      db: db2
    });
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const { filename, contentType } = extractFileParameters(req);
    validateFileParameters(filename, contentType);
    const key = generateS3Key(filename);
    let folder = "";
    if (authenticationResult.user.type === "api") {
      folder = `api/`;
    } else {
      folder = `${authenticationResult.user.id}/`;
    }
    getSignedUrl(
      getS3Client(),
      new PutObjectCommand({
        Bucket: process.env.COMPANION_S3_BUCKET,
        Key: folder + key,
        ContentType: contentType
      }),
      { expiresIn }
    ).then((url) => {
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.json({
        url,
        method: "PUT"
      });
      res.end();
    }, next);
  };
  app.get("/s3/params", async (req, res, next) => {
    return await signOnServer(req, res, next);
  });
  app.post("/s3/sign", async (req, res, next) => {
    return await signOnServer(req, res, next);
  });
  // Initiate a multipart upload; responds with { key, uploadId }.
  app.post("/s3/multipart", async (req, res, next) => {
    const apikey = req.headers["exulu-api-key"] || null;
    const { db: db2 } = await postgresClient();
    let authtoken = null;
    if (typeof apikey !== "string") {
      const secret = process.env.NEXTAUTH_SECRET;
      authtoken = await (0, import_jwt2.getToken)({ req, secret });
    }
    const authenticationResult = await authentication({
      authtoken,
      apikey,
      db: db2
    });
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const client = getS3Client();
    const { type, metadata, filename } = req.body;
    if (typeof filename !== "string") {
      return res.status(400).json({ error: "s3: content filename must be a string" });
    }
    if (typeof type !== "string") {
      return res.status(400).json({ error: "s3: content type must be a string" });
    }
    // NOTE(review): same garbled "$(unknown)" interpolation as generateS3Key
    // above — presumably `${filename}`; confirm against the package source.
    const key = `${crypto.randomUUID()}-$(unknown)`;
    let folder = "";
    if (authenticationResult.user.type === "api") {
      folder = `api/`;
    } else {
      folder = `${authenticationResult.user.id}/`;
    }
    const params = {
      Bucket: process.env.COMPANION_S3_BUCKET,
      Key: folder + key,
      ContentType: type,
      Metadata: metadata
    };
    const command = new CreateMultipartUploadCommand(params);
    return client.send(command, (err, data) => {
      if (err) {
        next(err);
        return;
      }
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.json({
        key: data.Key,
        uploadId: data.UploadId
      });
    });
  });
  // S3 multipart part numbers must be integers in [1, 10000].
  function validatePartNumber(partNumber) {
    partNumber = Number(partNumber);
    return Number.isInteger(partNumber) && partNumber >= 1 && partNumber <= 1e4;
  }
  // Presigned URL for uploading one part of a multipart upload.
  app.get("/s3/multipart/:uploadId/:partNumber", (req, res, next) => {
    const { uploadId, partNumber } = req.params;
    const { key } = req.query;
    if (!validatePartNumber(partNumber)) {
      return res.status(400).json({ error: "s3: the part number must be an integer between 1 and 10000." });
    }
    if (typeof key !== "string") {
      return res.status(400).json({ error: 's3: the object key must be passed as a query parameter. For example: "?key=abc.jpg"' });
    }
    // NOTE(review): Body: "" in a presign is unusual — presumably a
    // placeholder so the command validates; the client uploads the real
    // bytes against the signed URL. Confirm it does not pin the signature
    // to an empty payload for this endpoint configuration.
    return getSignedUrl(getS3Client(), new UploadPartCommand({
      Bucket: process.env.COMPANION_S3_BUCKET,
      Key: key,
      UploadId: uploadId,
      PartNumber: partNumber,
      Body: ""
    }), { expiresIn }).then((url) => {
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.json({ url, expires: expiresIn });
    }, next);
  });
  // List already-uploaded parts of a multipart upload, following pagination
  // markers recursively until IsTruncated is false.
  app.get("/s3/multipart/:uploadId", (req, res, next) => {
    const client = getS3Client();
    const { uploadId } = req.params;
    const { key } = req.query;
    if (typeof key !== "string") {
      res.status(400).json({ error: 's3: the object key must be passed as a query parameter. For example: "?key=abc.jpg"' });
      return;
    }
    const parts = [];
    function listPartsPage(startAt) {
      client.send(new ListPartsCommand({
        Bucket: process.env.COMPANION_S3_BUCKET,
        Key: key,
        UploadId: uploadId,
        PartNumberMarker: startAt
      }), (err, data) => {
        if (err) {
          next(err);
          return;
        }
        parts.push(...data.Parts);
        if (data.IsTruncated) {
          // More pages: continue from the returned marker.
          listPartsPage(data.NextPartNumberMarker);
        } else {
          res.json(parts);
        }
      });
    }
    listPartsPage(0);
  });
  // A valid part descriptor has a numeric PartNumber and a string ETag.
  function isValidPart(part) {
    return part && typeof part === "object" && Number(part.PartNumber) && typeof part.ETag === "string";
  }
  // Complete a multipart upload from the client-supplied parts list.
  app.post("/s3/multipart/:uploadId/complete", (req, res, next) => {
    const client = getS3Client();
    const { uploadId } = req.params;
    const { key } = req.query;
    const { parts } = req.body;
    if (typeof key !== "string") {
      return res.status(400).json({ error: 's3: the object key must be passed as a query parameter. For example: "?key=abc.jpg"' });
    }
    if (!Array.isArray(parts) || !parts.every(isValidPart)) {
      return res.status(400).json({ error: "s3: `parts` must be an array of {ETag, PartNumber} objects." });
    }
    return client.send(new CompleteMultipartUploadCommand({
      Bucket: process.env.COMPANION_S3_BUCKET,
      Key: key,
      UploadId: uploadId,
      MultipartUpload: {
        Parts: parts
      }
    }), (err, data) => {
      if (err) {
        next(err);
        return;
      }
      res.setHeader("Access-Control-Allow-Origin", "*");
      res.json({
        location: data.Location
      });
    });
  });
  // Abort a multipart upload, discarding any uploaded parts.
  app.delete("/s3/multipart/:uploadId", (req, res, next) => {
    const client = getS3Client();
    const { uploadId } = req.params;
    const { key } = req.query;
    if (typeof key !== "string") {
      return res.status(400).json({ error: 's3: the object key must be passed as a query parameter. For example: "?key=abc.jpg"' });
    }
    return client.send(new AbortMultipartUploadCommand({
      Bucket: process.env.COMPANION_S3_BUCKET,
      Key: key,
      UploadId: uploadId
    }), (err) => {
      if (err) {
        next(err);
        return;
      }
      res.json({});
    });
  });
  return app;
};
|
|
2454
|
+
|
|
2455
|
+
// src/registry/routes.ts
|
|
2456
|
+
var Papa = require("papaparse");
|
|
2457
|
+
// App-wide BullMQ queue names, keyed by internal identifier.
var global_queues = {
  logs_cleaner: "logs-cleaner"
};
|
|
2460
|
+
// Registers the recurring logs-cleaner job scheduler on its BullMQ queue and
// logs a summary table. Returns the queue so callers can attach workers.
var createRecurringJobs = async () => {
  const recurringJobSchedulersLogs = [];
  const queue = queues.use(global_queues.logs_cleaner);
  // Entry is only used for the console summary below.
  recurringJobSchedulersLogs.push({
    name: global_queues.logs_cleaner,
    pattern: "0 10 * * * *",
    ttld: "30 days",
    opts: {
      backoff: 3,
      attempts: 5,
      removeOnFail: 1e3
    }
  });
  // NOTE(review): the inline comment says "every 10 minutes", but the
  // 6-field cron "0 10 * * * *" (sec min hour dom mon dow) fires once per
  // hour at minute 10 — "*/10 * * * * *"-style would be every 10 minutes.
  // Confirm intended schedule.
  await queue.upsertJobScheduler(
    "logs-cleaner-scheduler",
    { pattern: "0 10 * * * *" },
    // every 10 minutes
    {
      name: global_queues.logs_cleaner,
      data: { ttld: 30 },
      // time to live in days
      opts: {
        backoff: 3,
        attempts: 5,
        removeOnFail: 1e3
      }
    }
  );
  console.log("Recurring job schedulers:");
  console.table(recurringJobSchedulersLogs);
  return queue;
};
|
|
2492
|
+
// Wires every public HTTP route (agents, contexts, items, tools, workflows,
// statistics, GraphQL) onto the provided Express app. Receives the in-memory
// registries built at startup; continues below with the individual routes.
var createExpressRoutes = async (app, agents, embedders, tools, workflows, contexts) => {
  // Collected route descriptors, printed as a table once startup completes.
  const routeLogs = [];
  var corsOptions = {
    origin: "*",
    optionsSuccessStatus: 200
    // some legacy browsers (IE11, various SmartTVs) choke on 204
  };
  app.use((0, import_cors.default)(corsOptions));
  // Startup banner ("EXULU" in box-drawing characters).
  console.log(`
\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557\u2588\u2588\u2557 \u2588\u2588\u2557\u2588\u2588\u2557 \u2588\u2588\u2557\u2588\u2588\u2557 \u2588\u2588\u2557 \u2588\u2588\u2557
\u2588\u2588\u2554\u2550\u2550\u2550\u2550\u255D\u255A\u2588\u2588\u2557\u2588\u2588\u2554\u255D\u2588\u2588\u2551 \u2588\u2588\u2551\u2588\u2588\u2551 \u2588\u2588\u2551 \u2588\u2588\u2551
\u2588\u2588\u2588\u2588\u2588\u2557 \u255A\u2588\u2588\u2588\u2554\u255D \u2588\u2588\u2551 \u2588\u2588\u2551\u2588\u2588\u2551 \u2588\u2588\u2551 \u2588\u2588\u2551
\u2588\u2588\u2554\u2550\u2550\u255D \u2588\u2588\u2554\u2588\u2588\u2557 \u2588\u2588\u2551 \u2588\u2588\u2551\u2588\u2588\u2551 \u2588\u2588\u2551 \u2588\u2588\u2551
\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557\u2588\u2588\u2554\u255D \u2588\u2588\u2557\u255A\u2588\u2588\u2588\u2588\u2588\u2588\u2554\u255D\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557\u255A\u2588\u2588\u2588\u2588\u2588\u2588\u2554\u255D
\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u255D\u255A\u2550\u255D \u255A\u2550\u255D \u255A\u2550\u2550\u2550\u2550\u2550\u255D \u255A\u2550\u2550\u2550\u2550\u2550\u2550\u255D \u255A\u2550\u2550\u2550\u2550\u2550\u255D
`);
  // Summaries of the registered agents and contexts, for operator visibility.
  console.log("Agents:");
  console.table(agents.map((agent) => {
    return {
      id: agent.id,
      name: agent.name,
      description: agent.description,
      slug: "/agents/" + agent.id,
      active: true
    };
  }));
  console.log("Contexts:");
  console.table(contexts.map((context) => {
    const sources = context.sources.get();
    return {
      id: context.id,
      name: context.name,
      description: context.description,
      embedder: context.embedder.name,
      slug: "/contexts/" + context.id,
      active: context.active,
      sources: Array.isArray(sources) ? sources.length : 0,
      sources_details: Array.isArray(sources) ? sources.map((source) => `${source.name} (${source.id})`).join(", ") : "No sources"
    };
  }));
  // Static route catalogue (the dynamic updater/agent routes are appended later).
  routeLogs.push(
    { route: "/agents", method: "GET", note: "List all agents" },
    { route: "/agents/:id", method: "GET", note: "Get specific agent" },
    { route: "/workflows", method: "GET", note: "List all workflows" },
    { route: "/workflows/:id", method: "GET", note: "Get specific workflow" },
    { route: "/contexts", method: "GET", note: "List all contexts" },
    { route: "/contexts/:id", method: "GET", note: "Get specific context" },
    { route: "/contexts/statistics", method: "GET", note: "Get context statistics" },
    { route: "/tools", method: "GET", note: "List all tools" },
    { route: "/tools/:id", method: "GET", note: "Get specific tool" },
    { route: "/statistics/timeseries", method: "POST", note: "Get time series statistics" },
    { route: "/statistics/totals", method: "POST", note: "Get totals statistics" },
    { route: "/items/:context", method: "POST", note: "Create new item in context" },
    { route: "/items/:context", method: "GET", note: "Get items from context" },
    { route: "/items/export/:context", method: "GET", note: "Export items from context" },
    { route: "/graphql", method: "POST", note: "GraphQL endpoint" }
  );
|
|
2549
|
+
  await createRecurringJobs();
  // Build the GraphQL schema from the individual sub-schemas and mount an
  // Apollo server at /graphql. Introspection is left on intentionally.
  const schema = createSDL([usersSchema, rolesSchema, agentsSchema, jobsSchema]);
  const server = new import_server3.ApolloServer({ schema, introspection: true });
  await server.start();
  app.use(
    "/graphql",
    (0, import_cors.default)(),
    import_express4.default.json(),
    (0, import_express5.expressMiddleware)(server, {
      // Per-request context: authenticate, then expose the request and a
      // database handle to resolvers. Throwing here rejects the request.
      context: async ({ req }) => {
        const authenticationResult = await requestValidators.authenticate(req);
        if (!authenticationResult.user?.id) {
          throw new Error(authenticationResult.message);
        }
        const { db: db2 } = await postgresClient();
        return {
          req,
          db: db2
        };
      }
    })
  );
|
|
2571
|
+
app.get(`/agents`, async (req, res) => {
|
|
2572
|
+
res.status(200).json(agents);
|
|
2573
|
+
});
|
|
2574
|
+
  // GET /agents/:id — look up a stored agent instance in the database and
  // merge it with the static capabilities of its registered backend.
  app.get(`/agents/:id`, async (req, res) => {
    const { db: db2 } = await postgresClient();
    const id = req.params.id;
    if (!id) {
      res.status(400).json({
        message: "Missing id in request."
      });
      return;
    }
    const agent = await db2.from("agents").where({ id }).first();
    if (!agent) {
      res.status(400).json({
        message: "Agent not found in database."
      });
      return;
    }
    // NOTE(review): logs the full agent row on every request — consider
    // removing or reducing for production.
    console.log("[EXULU] agent", agent);
    // The backend registry entry referenced by agent.backend supplies the
    // static metadata (slug, rate limit, streaming, tools).
    const backend = agents.find((a) => a.id === agent.backend);
    res.status(200).json({
      ...{
        name: agent.name,
        id: agent.id,
        description: agent.description,
        active: agent.active,
        public: agent.public,
        slug: backend?.slug,
        rateLimit: backend?.rateLimit,
        streaming: backend?.streaming,
        capabilities: backend?.capabilities,
        // todo add contexts
        availableTools: backend?.tools,
        enabledTools: agent.tools
      }
    });
  });
|
|
2609
|
+
console.log("tools", tools);
|
|
2610
|
+
app.get("/tools", async (req, res) => {
|
|
2611
|
+
res.status(200).json(tools.map((tool) => ({
|
|
2612
|
+
id: tool.id,
|
|
2613
|
+
name: tool.name,
|
|
2614
|
+
description: tool.description,
|
|
2615
|
+
type: tool.type || "tool",
|
|
2616
|
+
inputSchema: tool.inputSchema ? (0, import_zodex.zerialize)(tool.inputSchema) : null,
|
|
2617
|
+
outputSchema: tool.outputSchema ? (0, import_zodex.zerialize)(tool.outputSchema) : null
|
|
2618
|
+
})));
|
|
2619
|
+
});
|
|
2620
|
+
app.get("/tools/:id", async (req, res) => {
|
|
2621
|
+
const id = req.params.id;
|
|
2622
|
+
if (!id) {
|
|
2623
|
+
res.status(400).json({
|
|
2624
|
+
message: "Missing id in request."
|
|
2625
|
+
});
|
|
2626
|
+
return;
|
|
2627
|
+
}
|
|
2628
|
+
const tool = tools.find((tool2) => tool2.id === id);
|
|
2629
|
+
if (!tool) {
|
|
2630
|
+
res.status(400).json({
|
|
2631
|
+
message: "Tool not found."
|
|
2632
|
+
});
|
|
2633
|
+
return;
|
|
2634
|
+
}
|
|
2635
|
+
res.status(200).json(tool);
|
|
2636
|
+
});
|
|
2637
|
+
const deleteItem = async ({
|
|
2638
|
+
id,
|
|
2639
|
+
external_id,
|
|
2640
|
+
contextId
|
|
2641
|
+
}) => {
|
|
2642
|
+
if (!contextId) {
|
|
2643
|
+
throw new Error("Missing context in request.");
|
|
2644
|
+
}
|
|
2645
|
+
if (!id && !external_id) {
|
|
2646
|
+
throw new Error("Missing id or external_id in request.");
|
|
2647
|
+
}
|
|
2648
|
+
const { db: db2 } = await postgresClient();
|
|
2649
|
+
const context = contexts.find((context2) => context2.id === contextId);
|
|
2650
|
+
if (!context) {
|
|
2651
|
+
throw new Error("Context not found in registry.");
|
|
2652
|
+
}
|
|
2653
|
+
const exists = await context.tableExists();
|
|
2654
|
+
if (!exists) {
|
|
2655
|
+
throw new Error("Table with name " + context.getTableName() + " does not exist.");
|
|
2656
|
+
}
|
|
2657
|
+
const mutation = db2.from(context.getTableName()).delete().returning("id");
|
|
2658
|
+
if (id) {
|
|
2659
|
+
mutation.where({ id });
|
|
2660
|
+
}
|
|
2661
|
+
if (external_id) {
|
|
2662
|
+
mutation.where({ external_id });
|
|
2663
|
+
}
|
|
2664
|
+
const result = await mutation;
|
|
2665
|
+
return result;
|
|
2666
|
+
};
|
|
2667
|
+
app.delete("/items/:context/:id", async (req, res) => {
|
|
2668
|
+
if (!req.params.context) {
|
|
2669
|
+
res.status(400).json({
|
|
2670
|
+
message: "Missing context in request."
|
|
2671
|
+
});
|
|
2672
|
+
return;
|
|
2673
|
+
}
|
|
2674
|
+
const result = await deleteItem({
|
|
2675
|
+
id: req.params.id,
|
|
2676
|
+
contextId: req.params.context
|
|
2677
|
+
});
|
|
2678
|
+
res.status(200).json(result);
|
|
2679
|
+
});
|
|
2680
|
+
app.delete("/items/:context/external/:id", async (req, res) => {
|
|
2681
|
+
if (!req.params.context) {
|
|
2682
|
+
res.status(400).json({
|
|
2683
|
+
message: "Missing context in request."
|
|
2684
|
+
});
|
|
2685
|
+
return;
|
|
2686
|
+
}
|
|
2687
|
+
const result = await deleteItem({
|
|
2688
|
+
external_id: req.params.id,
|
|
2689
|
+
contextId: req.params.context
|
|
2690
|
+
});
|
|
2691
|
+
res.status(200).json(result);
|
|
2692
|
+
});
|
|
2693
|
+
app.get("/items/:context/:id", async (req, res) => {
|
|
2694
|
+
if (!req.params.context) {
|
|
2695
|
+
res.status(400).json({
|
|
2696
|
+
message: "Missing context in request."
|
|
2697
|
+
});
|
|
2698
|
+
return;
|
|
2699
|
+
}
|
|
2700
|
+
if (!req.params.id) {
|
|
2701
|
+
res.status(400).json({
|
|
2702
|
+
message: "Missing id in request."
|
|
2703
|
+
});
|
|
2704
|
+
return;
|
|
2705
|
+
}
|
|
2706
|
+
const { db: db2 } = await postgresClient();
|
|
2707
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2708
|
+
if (!context) {
|
|
2709
|
+
res.status(400).json({
|
|
2710
|
+
message: "Context not found in registry."
|
|
2711
|
+
});
|
|
2712
|
+
return;
|
|
2713
|
+
}
|
|
2714
|
+
const item = await db2.from(context.getTableName()).where({ id: req.params.id }).select("*").first();
|
|
2715
|
+
if (!item) {
|
|
2716
|
+
res.status(404).json({
|
|
2717
|
+
message: "Item not found."
|
|
2718
|
+
});
|
|
2719
|
+
return;
|
|
2720
|
+
}
|
|
2721
|
+
console.log("[EXULU] chunks table name.", context.getChunksTableName());
|
|
2722
|
+
const chunks = await db2.from(context.getChunksTableName()).where({ source: req.params.id }).select("id", "content", "source", "embedding", "chunk_index", "created_at", "updated_at");
|
|
2723
|
+
console.log("[EXULU] chunks", chunks);
|
|
2724
|
+
res.status(200).json({
|
|
2725
|
+
...item,
|
|
2726
|
+
chunks: chunks.map((chunk) => ({
|
|
2727
|
+
id: chunk.id,
|
|
2728
|
+
content: chunk.content,
|
|
2729
|
+
source: chunk.source,
|
|
2730
|
+
index: chunk.chunk_index,
|
|
2731
|
+
embedding: chunk.embedding?.length > 0 ? JSON.parse(chunk.embedding)?.length : null,
|
|
2732
|
+
createdAt: chunk.created_at,
|
|
2733
|
+
updatedAt: chunk.updated_at
|
|
2734
|
+
}))
|
|
2735
|
+
});
|
|
2736
|
+
});
|
|
2737
|
+
app.post("/items/:context/:id", async (req, res) => {
|
|
2738
|
+
if (!req.params.context) {
|
|
2739
|
+
res.status(400).json({
|
|
2740
|
+
message: "Missing context in request."
|
|
2741
|
+
});
|
|
2742
|
+
return;
|
|
2743
|
+
}
|
|
2744
|
+
if (!req.params.id) {
|
|
2745
|
+
res.status(400).json({
|
|
2746
|
+
message: "Missing id in request."
|
|
2747
|
+
});
|
|
2748
|
+
return;
|
|
2749
|
+
}
|
|
2750
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2751
|
+
if (!authenticationResult.user?.id) {
|
|
2752
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2753
|
+
return;
|
|
2754
|
+
}
|
|
2755
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2756
|
+
if (!context) {
|
|
2757
|
+
res.status(400).json({
|
|
2758
|
+
message: "Context not found in registry."
|
|
2759
|
+
});
|
|
2760
|
+
return;
|
|
2761
|
+
}
|
|
2762
|
+
const exists = await context.tableExists();
|
|
2763
|
+
if (!exists) {
|
|
2764
|
+
await context.createItemsTable();
|
|
2765
|
+
}
|
|
2766
|
+
const result = await context.updateItem(authenticationResult.user.id, req.params.id, req.body);
|
|
2767
|
+
res.status(200).json({
|
|
2768
|
+
message: "Item updated successfully.",
|
|
2769
|
+
id: result
|
|
2770
|
+
});
|
|
2771
|
+
});
|
|
2772
|
+
app.post("/items/:context", async (req, res) => {
|
|
2773
|
+
try {
|
|
2774
|
+
if (!req.params.context) {
|
|
2775
|
+
res.status(400).json({
|
|
2776
|
+
message: "Missing context in request."
|
|
2777
|
+
});
|
|
2778
|
+
return;
|
|
2779
|
+
}
|
|
2780
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2781
|
+
if (!authenticationResult.user?.id) {
|
|
2782
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2783
|
+
return;
|
|
2784
|
+
}
|
|
2785
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2786
|
+
if (!context) {
|
|
2787
|
+
res.status(400).json({
|
|
2788
|
+
message: "Context not found in registry."
|
|
2789
|
+
});
|
|
2790
|
+
return;
|
|
2791
|
+
}
|
|
2792
|
+
const exists = await context.tableExists();
|
|
2793
|
+
if (!exists) {
|
|
2794
|
+
await context.createItemsTable();
|
|
2795
|
+
}
|
|
2796
|
+
const result = await context.insertItem(authenticationResult.user.id, req.body, !!req.body.upsert);
|
|
2797
|
+
res.status(200).json({
|
|
2798
|
+
message: "Item created successfully.",
|
|
2799
|
+
id: result
|
|
2800
|
+
});
|
|
2801
|
+
} catch (error) {
|
|
2802
|
+
res.status(500).json({
|
|
2803
|
+
message: error?.message || "An error occurred while creating the item."
|
|
2804
|
+
});
|
|
2805
|
+
}
|
|
2806
|
+
});
|
|
2807
|
+
app.get("/items/:context", async (req, res) => {
|
|
2808
|
+
if (!req.params.context) {
|
|
2809
|
+
res.status(400).json({
|
|
2810
|
+
message: "Missing context in request."
|
|
2811
|
+
});
|
|
2812
|
+
return;
|
|
2813
|
+
}
|
|
2814
|
+
let limit = req.query.limit ? parseInt(req.query.limit) : 10;
|
|
2815
|
+
let page = req.query.page ? parseInt(req.query.page) : 1;
|
|
2816
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2817
|
+
if (!authenticationResult.user?.id) {
|
|
2818
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2819
|
+
return;
|
|
2820
|
+
}
|
|
2821
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2822
|
+
if (!context) {
|
|
2823
|
+
res.status(400).json({
|
|
2824
|
+
message: "Context not found in registry."
|
|
2825
|
+
});
|
|
2826
|
+
return;
|
|
2827
|
+
}
|
|
2828
|
+
const exists = await context.tableExists();
|
|
2829
|
+
if (!exists) {
|
|
2830
|
+
await context.createItemsTable();
|
|
2831
|
+
}
|
|
2832
|
+
if (req.query.method && !Object.values(VectorMethodEnum).includes(req.query.method)) {
|
|
2833
|
+
res.status(400).json({
|
|
2834
|
+
message: "Invalid vector lookup method, must be one of: " + Object.values(VectorMethodEnum).join(", ")
|
|
2835
|
+
});
|
|
2836
|
+
return;
|
|
2837
|
+
}
|
|
2838
|
+
const result = await context.getItems({
|
|
2839
|
+
page,
|
|
2840
|
+
limit,
|
|
2841
|
+
archived: req.query.archived === "true",
|
|
2842
|
+
name: typeof req.query.name === "string" ? req.query.name : void 0,
|
|
2843
|
+
method: req.query.method ? req.query.method : void 0,
|
|
2844
|
+
query: req.query.query ? req.query.query : void 0,
|
|
2845
|
+
statistics: {
|
|
2846
|
+
label: context.name,
|
|
2847
|
+
trigger: "api"
|
|
2848
|
+
}
|
|
2849
|
+
});
|
|
2850
|
+
res.status(200).json(result);
|
|
2851
|
+
});
|
|
2852
|
+
routeLogs.push({
|
|
2853
|
+
route: "/items/:context",
|
|
2854
|
+
method: "DELETE",
|
|
2855
|
+
note: `Delete all embeddings for a context.`
|
|
2856
|
+
});
|
|
2857
|
+
app.delete(`items/:context`, async (req, res) => {
|
|
2858
|
+
if (!req.params.context) {
|
|
2859
|
+
res.status(400).json({
|
|
2860
|
+
message: "Missing context in request."
|
|
2861
|
+
});
|
|
2862
|
+
return;
|
|
2863
|
+
}
|
|
2864
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2865
|
+
if (!context) {
|
|
2866
|
+
res.status(400).json({
|
|
2867
|
+
message: "Context not found in registry."
|
|
2868
|
+
});
|
|
2869
|
+
return;
|
|
2870
|
+
}
|
|
2871
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2872
|
+
if (!authenticationResult.user?.id) {
|
|
2873
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2874
|
+
return;
|
|
2875
|
+
}
|
|
2876
|
+
await context.deleteAll();
|
|
2877
|
+
res.status(200).json({
|
|
2878
|
+
message: "All embeddings deleted."
|
|
2879
|
+
});
|
|
2880
|
+
});
|
|
2881
|
+
routeLogs.push({
|
|
2882
|
+
route: `/items/:context/:id`,
|
|
2883
|
+
method: "DELETE",
|
|
2884
|
+
note: `Delete specific embedding for a context.`
|
|
2885
|
+
});
|
|
2886
|
+
app.delete(`items/:context/:id`, async (req, res) => {
|
|
2887
|
+
const id = req.params.id;
|
|
2888
|
+
if (!req.params.context) {
|
|
2889
|
+
res.status(400).json({
|
|
2890
|
+
message: "Missing context in request."
|
|
2891
|
+
});
|
|
2892
|
+
return;
|
|
2893
|
+
}
|
|
2894
|
+
const context = contexts.find((context2) => context2.id === req.params.context);
|
|
2895
|
+
if (!context) {
|
|
2896
|
+
res.status(400).json({
|
|
2897
|
+
message: "Context not found in registry."
|
|
2898
|
+
});
|
|
2899
|
+
return;
|
|
2900
|
+
}
|
|
2901
|
+
if (!id) {
|
|
2902
|
+
res.status(400).json({
|
|
2903
|
+
message: "Missing id in request."
|
|
2904
|
+
});
|
|
2905
|
+
return;
|
|
2906
|
+
}
|
|
2907
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2908
|
+
if (!authenticationResult.user?.id) {
|
|
2909
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2910
|
+
return;
|
|
2911
|
+
}
|
|
2912
|
+
await context.deleteOne(id);
|
|
2913
|
+
res.status(200).json({
|
|
2914
|
+
message: "Embedding deleted."
|
|
2915
|
+
});
|
|
2916
|
+
});
|
|
2917
|
+
app.post("/statistics/timeseries", async (req, res) => {
|
|
2918
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2919
|
+
if (!authenticationResult.user?.id) {
|
|
2920
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2921
|
+
return;
|
|
2922
|
+
}
|
|
2923
|
+
const { db: db2 } = await postgresClient();
|
|
2924
|
+
const type = req.body.type;
|
|
2925
|
+
if (!Object.values(STATISTICS_TYPE_ENUM).includes(type)) {
|
|
2926
|
+
res.status(400).json({
|
|
2927
|
+
message: "Invalid type, must be one of: " + Object.values(STATISTICS_TYPE_ENUM).join(", ")
|
|
2928
|
+
});
|
|
2929
|
+
return;
|
|
2930
|
+
}
|
|
2931
|
+
let from = new Date(req.body.from);
|
|
2932
|
+
let to = new Date(req.body.to);
|
|
2933
|
+
if (!from || !to) {
|
|
2934
|
+
from = new Date(Date.now() - 7 * 24 * 60 * 60 * 1e3);
|
|
2935
|
+
to = /* @__PURE__ */ new Date();
|
|
2936
|
+
}
|
|
2937
|
+
const query = db2.from("statistics").select("*");
|
|
2938
|
+
query.where("name", "count");
|
|
2939
|
+
query.andWhere("type", type);
|
|
2940
|
+
query.andWhere("createdAt", ">=", from);
|
|
2941
|
+
query.andWhere("createdAt", "<=", to);
|
|
2942
|
+
const results = await query;
|
|
2943
|
+
const dates = [];
|
|
2944
|
+
for (let i = 0; i < (to.getTime() - from.getTime()) / (1e3 * 60 * 60 * 24); i++) {
|
|
2945
|
+
dates.push(new Date(from.getTime() + i * (1e3 * 60 * 60 * 24)));
|
|
2946
|
+
}
|
|
2947
|
+
const data = dates.map((date) => {
|
|
2948
|
+
const result = results.find((result2) => result2.date === date);
|
|
2949
|
+
if (result) {
|
|
2950
|
+
return result;
|
|
2951
|
+
}
|
|
2952
|
+
return {
|
|
2953
|
+
date,
|
|
2954
|
+
count: 0
|
|
2955
|
+
};
|
|
2956
|
+
});
|
|
2957
|
+
res.status(200).json({
|
|
2958
|
+
data,
|
|
2959
|
+
filter: {
|
|
2960
|
+
from,
|
|
2961
|
+
to
|
|
2962
|
+
}
|
|
2963
|
+
});
|
|
2964
|
+
});
|
|
2965
|
+
app.post("/statistics/totals", async (req, res) => {
|
|
2966
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2967
|
+
if (!authenticationResult.user?.id) {
|
|
2968
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2969
|
+
return;
|
|
2970
|
+
}
|
|
2971
|
+
const { db: db2 } = await postgresClient();
|
|
2972
|
+
let from = new Date(req.body.from);
|
|
2973
|
+
let to = new Date(req.body.to);
|
|
2974
|
+
if (!from || !to) {
|
|
2975
|
+
from = new Date(Date.now() - 7 * 24 * 60 * 60 * 1e3);
|
|
2976
|
+
to = /* @__PURE__ */ new Date();
|
|
2977
|
+
}
|
|
2978
|
+
let promises2 = Object.values(STATISTICS_TYPE_ENUM).map(async (type) => {
|
|
2979
|
+
const result = await db2.from("statistics").where("name", "count").andWhere("type", type).andWhere("createdAt", ">=", from).andWhere("createdAt", "<=", to).sum("total as total");
|
|
2980
|
+
return {
|
|
2981
|
+
[type]: result[0]?.total || 0
|
|
2982
|
+
};
|
|
2983
|
+
});
|
|
2984
|
+
const results = await Promise.all(promises2);
|
|
2985
|
+
res.status(200).json({
|
|
2986
|
+
data: { ...Object.assign({}, ...results) },
|
|
2987
|
+
filter: {
|
|
2988
|
+
from,
|
|
2989
|
+
to
|
|
2990
|
+
}
|
|
2991
|
+
});
|
|
2992
|
+
});
|
|
2993
|
+
app.get("/contexts/statistics", async (req, res) => {
|
|
2994
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
2995
|
+
if (!authenticationResult.user?.id) {
|
|
2996
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
2997
|
+
return;
|
|
2998
|
+
}
|
|
2999
|
+
const { db: db2 } = await postgresClient();
|
|
3000
|
+
const statistics = await db2("statistics").where("name", "count").andWhere("type", "context.retrieve").sum("total as total").first();
|
|
3001
|
+
const response = await db2("jobs").select(db2.raw(`to_char("createdAt", 'YYYY-MM-DD') as date`)).count("* as count").where("type", "embedder").groupByRaw(`to_char("createdAt", 'YYYY-MM-DD')`).then((rows) => ({
|
|
3002
|
+
jobs: rows
|
|
3003
|
+
}));
|
|
3004
|
+
console.log({ response });
|
|
3005
|
+
let jobs = [];
|
|
3006
|
+
if (response[0]) {
|
|
3007
|
+
jobs = response[0].jobs.map((job) => ({
|
|
3008
|
+
date: job.id,
|
|
3009
|
+
count: job.count
|
|
3010
|
+
}));
|
|
3011
|
+
}
|
|
3012
|
+
const embeddingsCountResult = await db2("jobs").where("type", "embedder").count("* as count").first();
|
|
3013
|
+
res.status(200).json({
|
|
3014
|
+
active: contexts.filter((context) => context.active).length,
|
|
3015
|
+
inactive: contexts.filter((context) => !context.active).length,
|
|
3016
|
+
sources: contexts.reduce((acc, context) => acc + context.sources.get().length, 0),
|
|
3017
|
+
queries: statistics?.total || 0,
|
|
3018
|
+
jobs,
|
|
3019
|
+
totals: {
|
|
3020
|
+
embeddings: embeddingsCountResult?.count || 0
|
|
3021
|
+
}
|
|
3022
|
+
});
|
|
3023
|
+
});
|
|
3024
|
+
  // GET /contexts/:id — authenticated detail view of a registered context,
  // including its fields, sources, and their updaters.
  app.get(`/contexts/:id`, async (req, res) => {
    const authenticationResult = await requestValidators.authenticate(req);
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const id = req.params.id;
    if (!id) {
      res.status(400).json({
        message: "Missing id in request."
      });
      return;
    }
    const context = contexts.find((context2) => context2.id === id);
    if (!context) {
      res.status(400).json({
        message: "Context not found."
      });
      return;
    }
    res.status(200).json({
      ...{
        id: context.id,
        name: context.name,
        description: context.description,
        embedder: context.embedder.name,
        slug: "/contexts/" + context.id,
        active: context.active,
        fields: context.fields,
        sources: context.sources.get().map((source) => ({
          id: source.id,
          name: source.name,
          description: source.description,
          updaters: source.updaters.map((updater) => ({
            id: updater.id,
            slug: updater.slug,
            type: updater.type,
            configuration: updater.configuration
          }))
        }))
      },
      agents: []
      // todo
    });
  });
|
|
3069
|
+
  // GET /items/export/:context — authenticated CSV export of a context's
  // items (capped at the first 500; see the existing pagination todo).
  app.get(`/items/export/:context`, async (req, res) => {
    if (!req.params.context) {
      res.status(400).json({
        message: "Missing context in request."
      });
      return;
    }
    const authenticationResult = await requestValidators.authenticate(req);
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const context = contexts.find((context2) => context2.id === req.params.context);
    if (!context) {
      res.status(400).json({
        message: "Context not found."
      });
      return;
    }
    const items = await context.getItems({
      page: 1,
      // todo add pagination
      limit: 500
    });
    // NOTE(review): Papa.unparse expects an array (or {fields,data}) — if
    // getItems returns a wrapper object the CSV shape should be verified.
    const csv = Papa.unparse(items);
    const ISOTime = (/* @__PURE__ */ new Date()).toISOString();
    res.status(200).attachment(`${context.name}-items-export-${ISOTime}.csv`).send(csv);
  });
|
|
3097
|
+
  // GET /contexts — authenticated list of all registered contexts with their
  // sources and updaters.
  app.get(`/contexts`, async (req, res) => {
    // NOTE(review): leftover debug logging — consider removing.
    console.log("contexts!!");
    const authenticationResult = await requestValidators.authenticate(req);
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    console.log("contexts", contexts?.length);
    res.status(200).json(contexts.map((context) => ({
      id: context.id,
      name: context.name,
      description: context.description,
      embedder: context.embedder.name,
      slug: "/contexts/" + context.id,
      active: context.active,
      fields: context.fields,
      sources: context.sources.get().map((source) => ({
        id: source.id,
        name: source.name,
        description: source.description,
        updaters: source.updaters.map((updater) => ({
          id: updater.id,
          slug: updater.slug,
          type: updater.type,
          configuration: updater.configuration
        }))
      }))
    })));
  });
|
|
3126
|
+
app.get(`/workflows`, async (req, res) => {
|
|
3127
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
3128
|
+
if (!authenticationResult.user?.id) {
|
|
3129
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
3130
|
+
return;
|
|
3131
|
+
}
|
|
3132
|
+
res.status(200).json(workflows.map((workflow) => ({
|
|
3133
|
+
id: workflow.id,
|
|
3134
|
+
name: workflow.name,
|
|
3135
|
+
slug: workflow.slug,
|
|
3136
|
+
enable_batch: workflow.enable_batch,
|
|
3137
|
+
queue: workflow.queue?.name,
|
|
3138
|
+
inputSchema: workflow.inputSchema ? (0, import_zodex.zerialize)(workflow.inputSchema) : null
|
|
3139
|
+
})));
|
|
3140
|
+
});
|
|
3141
|
+
app.get(`/workflows/:id`, async (req, res) => {
|
|
3142
|
+
const authenticationResult = await requestValidators.authenticate(req);
|
|
3143
|
+
if (!authenticationResult.user?.id) {
|
|
3144
|
+
res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
|
|
3145
|
+
return;
|
|
3146
|
+
}
|
|
3147
|
+
const id = req.params.id;
|
|
3148
|
+
if (!id) {
|
|
3149
|
+
res.status(400).json({
|
|
3150
|
+
message: "Missing id in request."
|
|
3151
|
+
});
|
|
3152
|
+
return;
|
|
3153
|
+
}
|
|
3154
|
+
const workflow = workflows.find((workflow2) => workflow2.id === id);
|
|
3155
|
+
if (!workflow) {
|
|
3156
|
+
res.status(400).json({
|
|
3157
|
+
message: "Workflow not found."
|
|
3158
|
+
});
|
|
3159
|
+
return;
|
|
3160
|
+
}
|
|
3161
|
+
res.status(200).json({
|
|
3162
|
+
...workflow,
|
|
3163
|
+
queue: workflow.queue?.name,
|
|
3164
|
+
inputSchema: workflow.inputSchema ? (0, import_zodex.zerialize)(workflow.inputSchema) : null,
|
|
3165
|
+
workflow: void 0
|
|
3166
|
+
});
|
|
3167
|
+
});
|
|
3168
|
+
  // Dynamically register one POST route per webhook/manual updater of every
  // context source. Each route authenticates, validates the request against
  // the updater's configuration schema, fetches documents via the updater,
  // and enqueues them to the context's embedder queue in batches.
  contexts.forEach((context) => {
    const sources = context.sources.get();
    if (!Array.isArray(sources)) {
      return;
    }
    sources.forEach((source) => {
      source.updaters.forEach((updater) => {
        if (!updater.slug) return;
        if (updater.type === "webhook" || updater.type === "manual") {
          routeLogs.push({
            route: `${updater.slug}/${updater.type}/:context`,
            method: "POST",
            note: `Webhook updater for ${context.name}`
          });
          // NOTE(review): the route path is built from updater.slug — if the
          // slug does not start with "/", this route will never match.
          app.post(`${updater.slug}/${updater.type}/:context`, async (req, res) => {
            const { context: id } = req.params;
            if (!id) {
              res.status(400).json({
                message: "Missing context id in request."
              });
              return;
            }
            // Resolve against the registry again; the URL's context id may
            // differ from the closure's `context`.
            const context2 = contexts.find((context3) => context3.id === id);
            if (!context2) {
              res.status(400).json({
                message: `Context for provided id: ${id} not found.`
              });
              return;
            }
            if (!context2.embedder.queue) {
              res.status(500).json({ detail: "No queue set for embedder." });
              return;
            }
            const authenticationResult = await requestValidators.authenticate(req);
            if (!authenticationResult.user?.id) {
              res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
              return;
            }
            // Validates req.body against the updater's configuration schema.
            const requestValidationResult = requestValidators.embedders(req, updater.configuration);
            if (requestValidationResult.error) {
              res.status(requestValidationResult.code || 500).json({ detail: `${requestValidationResult.message}` });
              return;
            }
            // The updater function produces the documents to embed.
            const documents = await updater.fn(req.body.configuration);
            // Split documents into embedder-sized batches.
            const batches = [];
            for (let i = 0; i < documents.length; i += context2.embedder.batchSize) {
              batches.push(documents.slice(i, i + context2.embedder.batchSize));
            }
            let promises2 = [];
            if (batches.length > 0) {
              promises2 = batches.map((documents2) => {
                return bullmqDecorator({
                  label: `Job running context '${context2.name}' with embedder '${context2.embedder.name}' for '${req.body.label}'`,
                  type: "embedder",
                  embedder: context2.embedder.id,
                  updater: updater.id,
                  context: context2.id,
                  trigger: updater.type,
                  source: source.id,
                  inputs: req.body.inputs,
                  ...updater.configuration && { configuration: req.body.configuration },
                  documents: documents2,
                  queue: context2.embedder.queue,
                  user: authenticationResult.user.id
                });
              });
            }
            // Responds with the enqueued job descriptors.
            const jobs = await Promise.all(promises2);
            res.status(200).json(jobs);
            return;
          });
        }
      });
    });
  });
|
|
3243
|
+
// Registers one POST route per agent at `<slug>/:instance`. Supports a
// streaming mode (chunked `data:` frames) toggled by the "stream" request
// header, and a one-shot generate mode otherwise.
agents.forEach((agent) => {
  const slug = agent.slug;
  // Agents without a slug cannot be mounted as routes.
  if (!slug) return;
  routeLogs.push({
    route: slug + "/:instance",
    method: "POST",
    note: `Agent endpoint for ${agent.id}`
  });
  app.post(slug + "/:instance", async (req, res) => {
    const instance = req.params.instance;
    if (!instance) {
      res.status(400).json({
        message: "Missing instance in request."
      });
      return;
    }
    // Looks up the persisted agent instance row by its id.
    const { db: db2 } = await postgresClient();
    const agentInstance = await db2.from("agents").where({
      id: instance
    }).first();
    if (!agentInstance) {
      res.status(400).json({
        message: "Agent instance not found."
      });
      return;
    }
    // NOTE(review): rate limiting runs before authentication/validation, so
    // unauthenticated requests still consume rate-limit tokens and a DB
    // lookup — confirm this ordering is intended.
    if (agent.rateLimit) {
      const limit = await rateLimiter(
        agent.rateLimit.name || agent.id,
        agent.rateLimit.rate_limit.time,
        agent.rateLimit.rate_limit.limit,
        1
      );
      if (!limit.status) {
        res.status(429).json({
          message: "Rate limit exceeded.",
          retryAfter: limit.retryAfter
        });
        return;
      }
    }
    // Streaming is requested via a "stream" header (any truthy value).
    const stream = req.headers["stream"] || false;
    const requestValidationResult = requestValidators.agents(req);
    if (requestValidationResult.error) {
      res.status(requestValidationResult.code || 500).json({ detail: `${requestValidationResult.message}` });
      return;
    }
    const authenticationResult = await requestValidators.authenticate(req);
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    if (!!stream) {
      // Streaming path: instantiate a chat client bound to this instance.
      const chatClient = await agent.chat(agentInstance.id);
      if (!chatClient) {
        res.status(500).json({
          message: "Agent instantiation not successful."
        });
        return;
      }
      const { textStream } = await chatClient.stream(req.body.messages, {
        threadId: `${req.body.threadId}`,
        // conversation id
        resourceId: `${req.body.resourceId}`,
        // user id
        ...agent.outputSchema && { output: agent.outputSchema },
        maxRetries: 2,
        // todo make part of ExuluAgent class
        maxSteps: 5,
        // todo make part of ExuluAgent class
        onError: (error) => console.error("[EXULU] chat stream error.", error),
        onFinish: ({ response, usage }) => console.info(
          "[EXULU] chat stream finished.",
          usage
        )
      });
      // NOTE(review): SSE-style `data:` frames are written without setting a
      // `Content-Type: text/event-stream` header — confirm clients expect this.
      for await (const delta of textStream) {
        res.write(`data: ${delta}

`);
      }
      res.end();
      return;
    } else {
      // One-shot path.
      // NOTE(review): this branch calls `agent.chat.generate(...)` directly,
      // while the streaming branch first awaits `agent.chat(agentInstance.id)`
      // — confirm the inconsistency is intentional. Also note resourceId here
      // is the authenticated user id, not req.body.resourceId as above.
      const response = await agent.chat.generate(req.body.messages, {
        resourceId: `${authenticationResult.user.id}`,
        output: agent.outputSchema,
        threadId: `${req.body.threadId}`,
        // conversation id
        maxRetries: 2,
        // todo make part of ExuluAgent class
        maxSteps: 5
        // todo make part of ExuluAgent class
      });
      res.status(200).json(response);
      return;
    }
  });
});
|
|
3342
|
+
// Registers one POST route per workflow at its slug. After auth + validation
// the inputs are preprocessed (exulu_file_* keys become presigned URLs) and a
// BullMQ job is enqueued.
workflows.forEach((workflow) => {
  routeLogs.push({
    route: workflow.slug,
    method: "POST",
    note: `Execute workflow ${workflow.name}`
  });
  app.post(`${workflow.slug}`, async (req, res) => {
    // NOTE(review): this guard returns 500 whenever the workflow has no queue,
    // which makes the inline `createRun()` execution path further below
    // unreachable dead code — confirm whether the guard or the inline path
    // should be removed.
    if (!workflow.queue) {
      res.status(500).json({ detail: "No queue set for workflow." });
      return;
    }
    const authenticationResult = await requestValidators.authenticate(req);
    if (!authenticationResult.user?.id) {
      res.status(authenticationResult.code || 500).json({ detail: `${authenticationResult.message}` });
      return;
    }
    const requestValidationResult = requestValidators.workflows(req);
    if (requestValidationResult.error) {
      res.status(requestValidationResult.code || 500).json({ detail: `${requestValidationResult.message}` });
      return;
    }
    // Resolve exulu_file_* input keys into presigned download URLs.
    const inputs = await preprocessInputs(req.body.inputs);
    if (workflow.queue) {
      // Queue the workflow run and respond immediately with a "waiting" job.
      const job = await bullmqDecorator({
        label: `Job running '${workflow.name}' for '${req.body.label}'`,
        agent: req.body.agent,
        workflow: workflow.id,
        type: "workflow",
        inputs,
        session: req.body.session,
        queue: workflow.queue,
        user: authenticationResult.user.id
      });
      res.status(200).json({
        "job": {
          "status": "waiting",
          "name": job.name,
          "queue": workflow.queue.name,
          "redisId": job.redis,
          "jobId": job.id
        },
        "output": {}
      });
      return;
    }
    // Inline (unqueued) execution path — currently unreachable, see note above.
    // `runId` and `watch` are destructured but unused here.
    const { runId, start, watch } = workflow.workflow.createRun();
    console.log("[EXULU] running workflow with inputs.", inputs);
    const output = await start({
      triggerData: {
        ...inputs,
        user: authenticationResult.user.id
      }
    });
    const failedSteps = Object.entries(output.results).filter(([_, step]) => step.status === "failed").map(([id, step]) => `${id}: ${step.error}`);
    if (failedSteps.length > 0) {
      const message = `Workflow has failed steps: ${failedSteps.join("\n - ")}`;
      // NOTE(review): throwing inside an async Express handler only produces a
      // response if error-handling middleware is installed — confirm upstream.
      throw new Error(message);
    }
    res.status(200).json({
      "job": {},
      "output": output
    });
    return;
  });
});
|
|
3407
|
+
await createUppyRoutes(app);
|
|
3408
|
+
console.log("Routes:");
|
|
3409
|
+
console.table(routeLogs);
|
|
3410
|
+
};
|
|
3411
|
+
// Walks an inputs payload in place: every key containing "exulu_file_" is
// resolved to a presigned download URL and re-homed under the key without the
// prefix; nested objects and arrays of objects are traversed recursively.
// Mutates `data` and returns the same reference.
var preprocessInputs = async (data) => {
  for (const field in data) {
    const value = data[field];
    if (field.includes("exulu_file_")) {
      // Swap the stored file key for a temporary download URL.
      const presignedUrl = await getPresignedFileUrl(value);
      data[field.replace("exulu_file_", "")] = presignedUrl;
      delete data[field];
    } else if (Array.isArray(value)) {
      // Recurse into object elements only; primitives pass through untouched.
      for (const element of value) {
        if (typeof element === "object") {
          await preprocessInputs(element);
        }
      }
    } else if (typeof value === "object") {
      await preprocessInputs(value);
    }
  }
  return data;
};
|
|
3430
|
+
// Resolves a short-lived presigned download URL for a stored file by calling
// the internal upload service.
//
// @param {string} key - storage key of the file; used verbatim in the query string.
// @returns {Promise<string>} the presigned URL reported by the upload service.
// @throws {Error} when a required env var is missing or no url is returned.
var getPresignedFileUrl = async (key) => {
  if (!process.env.NEXT_PUBLIC_UPLOAD_URL) {
    throw new Error("Missing process.env.NEXT_PUBLIC_UPLOAD_URL");
  }
  if (!process.env.INTERNAL_SECRET) {
    // Bug fix: this message previously (incorrectly) said NEXT_PUBLIC_UPLOAD_URL.
    throw new Error("Missing process.env.INTERNAL_SECRET");
  }
  console.log(`[EXULU] fetching presigned url for file with key: ${key}`);
  // NOTE(review): `key` is not URI-encoded; keys containing '&', '?' or spaces
  // would corrupt the query string — confirm upstream keys are always safe.
  let url = `${process.env.NEXT_PUBLIC_UPLOAD_URL}/s3/download?key=${key}`;
  const response = await fetch(url, {
    method: "GET",
    headers: {
      "Content-Type": "application/json",
      // Service-to-service auth header checked by the upload service.
      "Internal-Key": process.env.INTERNAL_SECRET
    }
  });
  const json = await response.json();
  if (!json.url) {
    throw new Error(`Could not generate presigned url for file with key: ${key}`);
  }
  console.log(`[EXULU] presigned url for file with key: ${key}, generated: ${json.url}`);
  return json.url;
};
|
|
3453
|
+
|
|
3454
|
+
// src/registry/workers.ts
|
|
3455
|
+
var import_ioredis = __toESM(require("ioredis"), 1);
|
|
3456
|
+
var import_bullmq8 = require("bullmq");
|
|
3457
|
+
|
|
3458
|
+
// src/registry/utils.ts
|
|
3459
|
+
var import_bullmq7 = require("bullmq");
|
|
3460
|
+
// Shared helpers for BullMQ job processing: shape validation and the workflow
// execution routine used by the queue workers.
var bullmq = {
  // Validates the minimal required shape of a job before processing.
  // Throws a descriptive Error naming the missing/invalid field.
  validate: (job) => {
    if (!job.data) {
      throw new Error(`Missing job data for job ${job.id}.`);
    }
    if (!job.data.type) {
      throw new Error(`Missing property "type" in data for job ${job.id}.`);
    }
    if (!job.data.inputs) {
      throw new Error(`Missing property "inputs" in data for job ${job.id}.`);
    }
    if (job.data.type !== "embedder" && job.data.type !== "workflow") {
      throw new Error(`Property "type" in data for job ${job.id} must be of value "embedder" or "workflow".`);
    }
    if (!job.data.workflow && !job.data.embedder) {
      // Bug fix: the message previously referred to a stale "backend" property
      // that is never set; the fields actually checked are workflow/embedder.
      throw new Error(`Property "workflow" or "embedder" in data for job ${job.id} missing. Job data: ${JSON.stringify(job)}`);
    }
  },
  process: {
    // Runs a workflow for a queued job, logging to a per-job log file and
    // failing loudly if any step reports status "failed".
    workflow: async (job, workflow, logsDir) => {
      if (!workflow) {
        // Bug fix: previously interpolated the non-existent job.data.backend
        // (always "undefined"); the lookup key is job.data.workflow.
        throw new Error(`Workflow function with id: ${job.data.workflow} not found in registry.`);
      }
      // `runId` and `watch` are part of createRun()'s API but unused here.
      const { runId, start, watch } = workflow.workflow.createRun();
      console.log("[EXULU] starting workflow with job inputs.", job.data.inputs);
      const logger = new ExuluLogger(job, logsDir);
      const output = await start({ triggerData: {
        ...job.data.inputs,
        // Pass the redis job id and logger through to workflow steps.
        redis: job.id,
        logger
      } });
      const failedSteps = Object.entries(output.results).filter(([_, step]) => step.status === "failed").map(([id, step]) => `${id}: ${step.error}`);
      if (failedSteps.length > 0) {
        const message = `Workflow has failed steps: ${failedSteps.join("\n - ")}`;
        logger.write(message, "ERROR");
        throw new Error(message);
      }
      await logger.write(`Workflow completed. ${JSON.stringify(output.results)}`, "INFO");
      return output;
    }
  }
};
|
|
3502
|
+
|
|
3503
|
+
// src/registry/workers.ts
var fs2 = __toESM(require("fs"), 1);
var import_path = __toESM(require("path"), 1);
// Default directory for worker log files: ./logs under the process cwd.
var defaultLogsDir = import_path.default.join(process.cwd(), "logs");
// Shared ioredis connection used by all BullMQ workers below.
// maxRetriesPerRequest: null is required by BullMQ for its blocking
// connection commands (per the BullMQ connection documentation).
var redisConnection = new import_ioredis.default({
  ...redisServer,
  maxRetriesPerRequest: null
});
|
|
3511
|
+
// Creates one BullMQ Worker per queue name plus a recurring logs-cleaner
// worker, and returns them all.
//
// @param queues2    - distinct queue names to consume from.
// @param contexts   - registered contexts (looked up by job.data.context).
// @param embedders  - registered embedders (looked up by job.data.embedder).
// @param workflows  - registered workflows (looked up by job.data.workflow).
// @param _logsDir   - optional log directory; falls back to ./logs.
// @returns array of Worker instances (queue workers + logs cleaner).
var createWorkers = async (queues2, contexts, embedders, workflows, _logsDir) => {
  const logsDir = _logsDir || defaultLogsDir;
  const workers = queues2.map((queue) => {
    console.log(`[EXULU] creating worker for queue ${queue}.`);
    const worker = new import_bullmq8.Worker(
      `${queue}`,
      async (job) => {
        const { db: db2 } = await postgresClient();
        try {
          // Shape validation; throws on missing type/inputs/workflow/embedder.
          bullmq.validate(job);
          if (job.data.type === "embedder") {
            if (!job.data.updater) {
              throw new Error("No updater set for embedder job.");
            }
            // Resolve the registry objects referenced by the job payload.
            const context = contexts.find((context2) => context2.id === job.data.context);
            if (!context) {
              throw new Error(`Context ${job.data.context} not found in the registry.`);
            }
            if (!job.data.embedder) {
              throw new Error(`No embedder set for embedder job.`);
            }
            const embedder = embedders.find((embedder2) => embedder2.id === job.data.embedder);
            if (!embedder) {
              throw new Error(`Embedder ${job.data.embedder} not found in the registry.`);
            }
            if (!job.data.source) {
              throw new Error("No source set for embedder job.");
            }
            const source = context.sources.get(job.data.source);
            if (!source) {
              throw new Error(`Source ${job.data.source} not found in the registry.`);
            }
            // NOTE(review): duplicate of the updater guard above — candidate
            // for removal.
            if (!job.data.updater) {
              throw new Error("No updater set for embedder job.");
            }
            const updater = source.updaters.find((updater2) => updater2.id === job.data.updater);
            if (!updater) {
              throw new Error(`Updater ${job.data.updater} not found in the registry.`);
            }
            if (!job.data.documents) {
              throw new Error("No input documents set for embedder job.");
            }
            if (!Array.isArray(job.data.documents)) {
              throw new Error("Input documents must be an array.");
            }
            // Upsert the batch of documents through the embedder.
            const result = await embedder.upsert(job.data.context, job.data.documents, {
              label: context.name,
              trigger: job.data.trigger || "unknown"
            });
            // NOTE(review): named `mongoRecord` but fetched via the postgres
            // client — rename candidate.
            const mongoRecord = await db2.from("jobs").where({ redis: job.id }).first();
            if (!mongoRecord) {
              throw new Error("Job not found in the database.");
            }
            const finishedAt = /* @__PURE__ */ new Date();
            // Duration in seconds since the job row was created.
            const duration = (finishedAt.getTime() - new Date(mongoRecord.createdAt).getTime()) / 1e3;
            await db2.from("jobs").where({ redis: job.id }).update({
              status: "completed",
              finishedAt,
              duration,
              result: JSON.stringify(result)
            });
            return result;
          }
          if (job.data.type === "workflow") {
            const workflow = workflows.find((workflow2) => workflow2.id === job.data.workflow);
            if (!workflow) {
              throw new Error(`Workflow ${job.data.workflow} not found in the registry.`);
            }
            const result = await bullmq.process.workflow(job, workflow, logsDir);
            const mongoRecord = await db2.from("jobs").where({ redis: job.id }).first();
            if (!mongoRecord) {
              throw new Error("Job not found in the database.");
            }
            const finishedAt = /* @__PURE__ */ new Date();
            const duration = (finishedAt.getTime() - new Date(mongoRecord.createdAt).getTime()) / 1e3;
            await db2.from("jobs").where({ redis: job.id }).update({
              status: "completed",
              finishedAt,
              duration,
              result: JSON.stringify(result)
            });
            return result;
          }
        } catch (error) {
          // Persist the failure in the jobs table, then rethrow so BullMQ
          // also records the job as failed.
          // NOTE(review): wrapping in a new Error drops the original stack —
          // consider rethrowing `error` or using { cause }.
          await db2.from("jobs").where({ redis: job.id }).update({
            status: "failed",
            finishedAt: /* @__PURE__ */ new Date(),
            error: error instanceof Error ? error.message : String(error)
          });
          throw new Error(error instanceof Error ? error.message : String(error));
        }
      },
      { connection: redisConnection }
    );
    // Lifecycle logging only; no state is kept on these events.
    worker.on("completed", (job, returnvalue) => {
      console.log(`[EXULU] completed job ${job.id}.`, returnvalue);
    });
    worker.on("failed", (job, error, prev) => {
      if (job?.id) {
        console.error(`[EXULU] failed job ${job.id}.`);
      }
      console.error(`[EXULU] job error.`, error);
    });
    worker.on("progress", (job, progress) => {
      console.log(`[EXULU] job progress ${job.id}.`, progress);
    });
    return worker;
  });
  const logsCleaner = createLogsCleanerWorker(logsDir);
  workers.push(logsCleaner);
  return workers;
};
|
|
3623
|
+
// Creates the recurring BullMQ worker that prunes old *.log files from
// `logsDir`. Files whose mtime is older than job.data.ttld days are deleted.
// NOTE(review): if job.data.ttld is undefined the cutoff is an Invalid Date
// and nothing is ever deleted — confirm the scheduler always sets ttld.
var createLogsCleanerWorker = (logsDir) => {
  const cleaner = new import_bullmq8.Worker(
    global_queues.logs_cleaner,
    async (job) => {
      console.log(`[EXULU] recurring job ${job.id}.`);
      const retentionDays = job.data.ttld;
      // Anything last modified before this cutoff is eligible for deletion.
      const cutoff = new Date(Date.now() - retentionDays * 24 * 60 * 60 * 1e3);
      const logFiles = fs2.readdirSync(logsDir).filter((entry) => entry.endsWith(".log"));
      for (const entry of logFiles) {
        const fullPath = import_path.default.join(logsDir, entry);
        if (fs2.statSync(fullPath).mtime < cutoff) {
          fs2.unlinkSync(fullPath);
        }
      }
    },
    { connection: redisConnection }
  );
  // Lifecycle logging only.
  cleaner.on("completed", (job, returnvalue) => {
    console.log(`[EXULU] completed logs cleaner ${job.id}.`, returnvalue);
  });
  cleaner.on("failed", (job, error, prev) => {
    if (job?.id) {
      console.error(`[EXULU] failed logs cleaner ${job.id}.`);
    }
    console.error(`[EXULU] job error logs cleaner.`, error);
  });
  return cleaner;
};
|
|
3654
|
+
|
|
3655
|
+
// src/registry/index.ts
|
|
3656
|
+
// src/registry/index.ts
// Central registry tying together contexts, embedders, agents, workflows and
// tools, plus factories for the BullMQ workers and the Express routes.
var ExuluApp = class {
  _agents;
  _workflows;
  _config;
  _embedders;
  _queues = [];
  _contexts;
  _tools;
  constructor({ contexts, embedders, agents, workflows, config, tools }) {
    // Every collection defaults to empty so lookups below never crash.
    this._embedders = embedders ?? [];
    this._workflows = workflows ?? [];
    this._contexts = contexts ?? {};
    this._agents = agents ?? [];
    this._config = config;
    this._tools = tools ?? [];
    // Collect the distinct queue names referenced by embedders and workflows.
    const queueNames = [...this._embedders, ...this._workflows].map((entry) => entry.queue?.name || null);
    this._queues = [...new Set(queueNames.filter((name) => !!name))];
  }
  // Lookup helpers — each returns the matching entry or undefined.
  embedder(id) {
    for (const candidate of this._embedders) {
      if (candidate.id === id) return candidate;
    }
    return void 0;
  }
  tool(id) {
    for (const candidate of this._tools) {
      if (candidate.id === id) return candidate;
    }
    return void 0;
  }
  tools() {
    return this._tools;
  }
  context(id) {
    const registered = Object.values(this._contexts ?? {});
    return registered.find((candidate) => candidate.id === id);
  }
  agent(id) {
    return this._agents.find((candidate) => candidate.id === id);
  }
  workflow(id) {
    return this._workflows.find((candidate) => candidate.id === id);
  }
  get embedders() {
    return this._embedders;
  }
  get contexts() {
    return Object.values(this._contexts ?? {});
  }
  get workflows() {
    return this._workflows;
  }
  get agents() {
    return this._agents;
  }
  // Factory for the BullMQ workers backing this registry's queues.
  bullmq = {
    workers: {
      create: async () => {
        const registeredContexts = Object.values(this._contexts ?? {});
        return await createWorkers(
          this._queues,
          registeredContexts,
          this._embedders,
          this._workflows,
          this._config.workers?.logsDir
        );
      }
    }
  };
  // Factory for the Express routes exposing agents/embedders/workflows.
  server = {
    express: {
      init: async (app) => {
        const registeredContexts = Object.values(this._contexts ?? {});
        return await createExpressRoutes(
          app,
          this._agents,
          this._embedders,
          this._tools,
          this._workflows,
          registeredContexts
        );
      }
    }
  };
};
|
|
3735
|
+
|
|
3736
|
+
// src/index.ts
|
|
3737
|
+
// src/index.ts
// Job-related helpers re-exported as a namespace: the redis client factory
// and the job payload validator (both defined earlier in this bundle).
var ExuluJobs = {
  redis: redisClient,
  jobs: {
    // Validates a job payload before enqueueing; see validateJob.
    validate: validateJob
  }
};
|
|
3743
|
+
// Database bootstrap namespace. init() delegates to execute(), defined
// earlier in this bundle — presumably schema setup/migrations; confirm.
var ExuluDatabase = {
  init: async () => {
    await execute();
  }
};
|
|
3748
|
+
// Annotate the CommonJS export names for ESM import in node:
// (bundler-generated dead `0 && …` expression so Node's lexer can statically
// detect the named exports — do not remove)
0 && (module.exports = {
  EXULU_STATISTICS_TYPE_ENUM,
  ExuluAgent,
  ExuluApp,
  ExuluAuthentication,
  ExuluContext,
  ExuluDatabase,
  ExuluEmbedder,
  ExuluJobs,
  ExuluQueues,
  ExuluSource,
  ExuluTool,
  ExuluWorkflow,
  ExuluZodFileType
});
|