@props-labs/mesh-os 0.1.23 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/__fixtures__/mock_responses.d.ts +318 -0
- package/dist/core/__fixtures__/mock_responses.js +333 -0
- package/dist/core/__fixtures__/sample_embeddings.d.ts +33 -0
- package/dist/core/__fixtures__/sample_embeddings.js +12355 -0
- package/dist/core/agents.d.ts +51 -0
- package/dist/core/agents.js +170 -0
- package/dist/core/memories.d.ts +138 -0
- package/dist/core/memories.js +417 -0
- package/dist/core/workflows.d.ts +84 -25
- package/dist/core/workflows.js +224 -135
- package/package.json +3 -3
- package/src/templates/hasura/metadata/actions.yaml +6 -0
- package/src/templates/hasura/metadata/cron_triggers.yaml +1 -0
- package/src/templates/hasura/metadata/databases/databases.yaml +1 -1
- package/src/templates/hasura/metadata/databases/default/functions/functions.yaml +80 -0
- package/src/templates/hasura/metadata/databases/default/tables/tables.yaml +274 -9
- package/src/templates/hasura/metadata/query_collections.yaml +1 -0
- package/src/templates/hasura/metadata/rest_endpoints.yaml +1 -0
- package/src/templates/hasura/migrations/default/0_cleanup/down.sql +2 -0
- package/src/templates/hasura/migrations/default/0_cleanup/up.sql +59 -0
- package/src/templates/hasura/migrations/default/1_init/down.sql +27 -21
- package/src/templates/hasura/migrations/default/1_init/up.sql +446 -174
- package/src/templates/hasura/migrations/default/2_sample_data/down.sql +3 -0
- package/src/templates/hasura/migrations/default/2_sample_data/up.sql +288 -0
- package/src/templates/hasura/migrations/default/3_agent_relations/down.sql +76 -0
- package/src/templates/hasura/migrations/default/3_agent_relations/up.sql +469 -0
- package/src/templates/hasura/metadata/config.yaml +0 -1
- package/src/templates/hasura/metadata/databases/default/tables/public_agents.yaml +0 -14
- package/src/templates/hasura/metadata/databases/default/tables/public_memories.yaml +0 -23
- package/src/templates/hasura/metadata/databases/default/tables/public_memory_edges.yaml +0 -57
- package/src/templates/hasura/metadata/databases/default/tables/track_tables.yaml +0 -14
- package/src/templates/hasura/metadata/metadata.json +0 -80
- package/src/templates/hasura/migrations/default/2_metadata_filtering/down.sql +0 -4
- package/src/templates/hasura/migrations/default/2_metadata_filtering/up.sql +0 -44
- package/src/templates/hasura/migrations/default/3_memory_expiry/down.sql +0 -55
- package/src/templates/hasura/migrations/default/3_memory_expiry/up.sql +0 -108
- package/src/templates/hasura/migrations/default/4_remove_slug_validation/down.sql +0 -20
- package/src/templates/hasura/migrations/default/4_remove_slug_validation/up.sql +0 -5
- package/src/templates/hasura/migrations/default/5_entities/down.sql +0 -13
- package/src/templates/hasura/migrations/default/5_entities/up.sql +0 -155
package/dist/core/workflows.js
CHANGED
@@ -4,240 +4,329 @@ exports.WorkflowManager = void 0;
 /**
  * Workflow management functionality for MeshOS.
  */
-const
+const zod_1 = require("zod");
+// Validation schemas
+const workflowSchemaSchema = zod_1.z.object({
+    type: zod_1.z.string(),
+    input_schema: zod_1.z.record(zod_1.z.any()),
+    output_schema: zod_1.z.record(zod_1.z.any()),
+    metadata_schema: zod_1.z.record(zod_1.z.any()).nullable().optional(),
+    validation_rules: zod_1.z.record(zod_1.z.any()).nullable().optional(),
+    behaviors: zod_1.z.record(zod_1.z.any()).nullable().optional(),
+    created_at: zod_1.z.string(),
+    updated_at: zod_1.z.string()
+});
+const workflowRunSchema = zod_1.z.object({
+    id: zod_1.z.string().uuid(),
+    type: zod_1.z.string(),
+    status: zod_1.z.enum(['pending', 'running', 'completed', 'failed', 'cancelled']),
+    input: zod_1.z.record(zod_1.z.any()),
+    metadata: zod_1.z.record(zod_1.z.any()),
+    created_at: zod_1.z.string(),
+    updated_at: zod_1.z.string(),
+    agent_id: zod_1.z.string().uuid().nullable().optional()
+});
+const workflowResultSchema = zod_1.z.object({
+    id: zod_1.z.string().uuid(),
+    workflow_id: zod_1.z.string().uuid(),
+    type: zod_1.z.enum(['interim', 'final']),
+    result: zod_1.z.record(zod_1.z.any()),
+    metadata: zod_1.z.record(zod_1.z.any()),
+    created_at: zod_1.z.string(),
+    updated_at: zod_1.z.string(),
+    agent_id: zod_1.z.string().uuid().nullable().optional()
+});
 class WorkflowManager {
     constructor(url, headers) {
         this.url = url;
         this.headers = headers;
     }
     /**
-     * Execute a GraphQL query
+     * Execute a GraphQL query against Hasura
      */
     async executeQuery(query, variables) {
         const response = await fetch(this.url, {
             method: 'POST',
-            headers:
+            headers: {
+                'Content-Type': 'application/json',
+                ...this.headers
+            },
             body: JSON.stringify({
                 query,
-                variables
+                variables
             })
         });
         if (!response.ok) {
-            throw new Error(`
+            throw new Error(`Failed to execute query: ${response.statusText}`);
         }
-        const result = await response.json();
+        const result = (await response.json());
         if (result.errors) {
-            throw new Error(result.errors[0].message);
+            throw new Error(`GraphQL error: ${result.errors[0].message}`);
         }
         return result.data;
     }
     /**
-     *
+     * List available workflow schemas
      */
-    async
+    async listSchemas() {
         const query = `
-            query
-
+            query ListWorkflowSchemas {
+                workflow_schemas {
+                    type
+                    input_schema
+                    output_schema
+                    metadata_schema
+                    validation_rules
+                    behaviors
+                    created_at
+                    updated_at
+                }
+            }
+        `;
+        const result = await this.executeQuery(query, {});
+        return result.workflow_schemas.map(schema => workflowSchemaSchema.parse(schema));
+    }
+    /**
+     * Get a workflow schema by type
+     */
+    async getSchema(type) {
+        const query = `
+            query GetWorkflowSchema($type: String!) {
+                workflow_schemas_by_pk(type: $type) {
+                    type
+                    input_schema
+                    output_schema
+                    metadata_schema
+                    validation_rules
+                    behaviors
+                    created_at
+                    updated_at
+                }
+            }
+        `;
+        const result = await this.executeQuery(query, { type });
+        const schema = result.workflow_schemas_by_pk;
+        if (!schema) {
+            throw new Error(`Workflow schema not found for type: ${type}`);
+        }
+        return workflowSchemaSchema.parse(schema);
+    }
+    /**
+     * Create a new workflow run
+     */
+    async createRun(input) {
+        // First get the workflow schema to validate input
+        const schema = await this.getSchema(input.type);
+        const query = `
+            mutation CreateWorkflowRun($run: workflow_runs_insert_input!) {
+                insert_workflow_runs_one(object: $run) {
                     id
-                    job_id
                     type
                     status
-
+                    input
                     metadata
                     created_at
                     updated_at
+                    agent_id
                 }
             }
         `;
-        const result = await this.executeQuery(query, {
-
-
-
-
-
-
-
-
-
-            updatedAt: updated_at,
-        };
+        const result = await this.executeQuery(query, {
+            run: {
+                type: input.type,
+                input: input.input,
+                metadata: input.metadata || {},
+                status: 'pending',
+                agent_id: input.agent_id
+            }
+        });
+        return workflowRunSchema.parse(result.insert_workflow_runs_one);
     }
     /**
-     *
+     * Update a workflow run's status
      */
-    async
-        // Validate workflow data
-        const validatedData = taxonomy_1.workflowSchema.omit({ id: true, createdAt: true, updatedAt: true }).parse(workflow);
+    async updateStatus(input) {
         const query = `
-            mutation
-
+            mutation UpdateWorkflowStatus($id: uuid!, $status: String!, $metadata: jsonb) {
+                update_workflow_runs_by_pk(
+                    pk_columns: {id: $id},
+                    _set: {
+                        status: $status,
+                        metadata: $metadata
+                    }
+                ) {
                     id
-                    job_id
                     type
                     status
+                    input
+                    metadata
+                    created_at
+                    updated_at
+                    agent_id
+                }
+            }
+        `;
+        const result = await this.executeQuery(query, {
+            id: input.id,
+            status: input.status,
+            metadata: input.metadata
+        });
+        return workflowRunSchema.parse(result.update_workflow_runs_by_pk);
+    }
+    /**
+     * Create a workflow result
+     */
+    async createResult(input) {
+        const query = `
+            mutation CreateWorkflowResult($result: workflow_results_insert_input!) {
+                insert_workflow_results_one(object: $result) {
+                    id
+                    workflow_id
+                    type
                     result
                     metadata
                     created_at
                     updated_at
+                    agent_id
                 }
             }
         `;
         const result = await this.executeQuery(query, {
-
-
-            type:
-
-
-
+            result: {
+                workflow_id: input.workflow_id,
+                type: input.type,
+                result: input.result,
+                metadata: input.metadata || {},
+                agent_id: input.agent_id
             }
         });
-
-        const { job_id, created_at, updated_at, ...rest } = result.insert_workflows_one;
-        return {
-            ...rest,
-            jobId: job_id,
-            createdAt: created_at,
-            updatedAt: updated_at,
-        };
+        return workflowResultSchema.parse(result.insert_workflow_results_one);
     }
     /**
-     * Get a workflow by ID
+     * Get a workflow run by ID
      */
-    async
+    async getRun(id) {
         const query = `
-            query
-
+            query GetWorkflowRun($id: uuid!) {
+                workflow_runs_by_pk(id: $id) {
                     id
-                    job_id
                     type
                     status
-
+                    input
                     metadata
                     created_at
                     updated_at
+                    agent_id
                 }
             }
         `;
         const result = await this.executeQuery(query, { id });
-
-
+        const run = result.workflow_runs_by_pk;
+        if (!run) {
+            throw new Error(`Workflow run not found with id: ${id}`);
         }
-
-        const { job_id, created_at, updated_at, ...rest } = result.workflows_by_pk;
-        return {
-            ...rest,
-            jobId: job_id,
-            createdAt: created_at,
-            updatedAt: updated_at,
-        };
+        return workflowRunSchema.parse(run);
     }
     /**
-     * Get
+     * Get all results for a workflow run
      */
-    async
-        const { where, orderBy = [{ column: 'createdAt', order: 'desc' }], limit, offset } = options;
-        // Convert orderBy to Hasura format
-        const hasuraOrderBy = orderBy.map(({ column, order }) => {
-            // Convert camelCase to snake_case for column names
-            const snakeCaseColumn = column.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
-            return { [snakeCaseColumn]: order };
-        });
+    async getResults(workflowId) {
         const query = `
-            query
-
-
-
-                $offset: Int
-            ) {
-                workflows(
-                    where: $where,
-                    order_by: $orderBy,
-                    limit: $limit,
-                    offset: $offset
+            query GetWorkflowResults($workflow_id: uuid!) {
+                workflow_results(
+                    where: {workflow_id: {_eq: $workflow_id}}
+                    order_by: {created_at: asc}
                 ) {
                     id
-
+                    workflow_id
                     type
-                    status
                     result
                     metadata
                     created_at
                     updated_at
+                    agent_id
                 }
             }
         `;
-        const result = await this.executeQuery(query, {
-
-            orderBy: hasuraOrderBy,
-            limit,
-            offset
-        });
-        return result.workflows.map(workflow => {
-            const { job_id, created_at, updated_at, ...rest } = workflow;
-            return {
-                ...rest,
-                jobId: job_id,
-                createdAt: created_at,
-                updatedAt: updated_at,
-            };
-        });
+        const result = await this.executeQuery(query, { workflow_id: workflowId });
+        return result.workflow_results.map(r => workflowResultSchema.parse(r));
     }
     /**
-     *
+     * Get the final result for a workflow run
      */
-    async
+    async getFinalResult(workflowId) {
         const query = `
-
-
+            query GetWorkflowFinalResult($workflow_id: uuid!) {
+                workflow_results(
+                    where: {
+                        workflow_id: {_eq: $workflow_id},
+                        type: {_eq: "final"}
+                    }
+                    limit: 1
+                    order_by: {created_at: desc}
+                ) {
                     id
-
+                    workflow_id
                     type
-                    status
                     result
                     metadata
                     created_at
                     updated_at
+                    agent_id
                 }
             }
         `;
-
-
-        if (updates.jobId !== undefined)
-            updateData.job_id = updates.jobId;
-        if (updates.type !== undefined)
-            updateData.type = updates.type;
-        if (updates.status !== undefined)
-            updateData.status = updates.status;
-        if (updates.result !== undefined)
-            updateData.result = updates.result;
-        if (updates.metadata !== undefined)
-            updateData.metadata = updates.metadata;
-        const result = await this.executeQuery(query, {
-            id,
-            updates: updateData
-        });
-        // Convert snake_case to camelCase
-        const { job_id, created_at, updated_at, ...rest } = result.update_workflows_by_pk;
-        return {
-            ...rest,
-            jobId: job_id,
-            createdAt: created_at,
-            updatedAt: updated_at,
-        };
+        const result = await this.executeQuery(query, { workflow_id: workflowId });
+        return result.workflow_results[0] ? workflowResultSchema.parse(result.workflow_results[0]) : null;
     }
     /**
-     *
+     * List workflow runs with optional filtering and sorting
      */
-    async
+    async listRuns(options = {}) {
+        const { type, status, agent_id, limit = 10, offset = 0, order_by = [{ field: 'created_at', direction: 'desc' }] } = options;
+        // Build where clause
+        const where = {};
+        if (type)
+            where.type = { _eq: type };
+        if (status)
+            where.status = { _eq: status };
+        if (agent_id)
+            where.agent_id = { _eq: agent_id };
+        // Convert order_by to Hasura format
+        const orderBy = order_by.map(({ field, direction }) => ({
+            [field]: direction
+        }));
         const query = `
-
-
+            query ListWorkflowRuns(
+                $where: workflow_runs_bool_exp!,
+                $limit: Int!,
+                $offset: Int!,
+                $order_by: [workflow_runs_order_by!]!
+            ) {
+                workflow_runs(
+                    where: $where,
+                    limit: $limit,
+                    offset: $offset,
+                    order_by: $order_by
+                ) {
                     id
+                    type
+                    status
+                    input
+                    metadata
+                    created_at
+                    updated_at
+                    agent_id
                 }
             }
         `;
-        const result = await this.executeQuery(query, {
-
+        const result = await this.executeQuery(query, {
+            where,
+            limit,
+            offset,
+            order_by: orderBy
+        });
+        return result.workflow_runs.map(run => workflowRunSchema.parse(run));
     }
 }
 exports.WorkflowManager = WorkflowManager;
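
The rewritten module above replaces the 0.1.x workflow CRUD (camelCase field mapping, job_id, update_workflows_by_pk) with a schema-validated run/result model. A minimal usage sketch of the new surface, assembled from this file alone — the import path, endpoint, and auth header are assumptions, while the constructor and method shapes come straight from the diff:

// Usage sketch for the 0.2.0 WorkflowManager. The module path and the
// Hasura endpoint/secret below are assumptions; method shapes are from the diff.
import { WorkflowManager } from '@props-labs/mesh-os'; // export path assumed

const workflows = new WorkflowManager('http://localhost:8080/v1/graphql', {
    'x-hasura-admin-secret': 'dev-secret' // placeholder auth header
});

async function main(): Promise<void> {
    // createRun fetches the matching workflow_schemas row first, so the type
    // must already be registered; the run is inserted with status 'pending'.
    const run = await workflows.createRun({
        type: 'echo', // hypothetical workflow type
        input: { message: 'hello' }
    });
    await workflows.updateStatus({ id: run.id, status: 'running', metadata: run.metadata });
    // Results live in workflow_results as separate 'interim' or 'final' rows.
    await workflows.createResult({
        workflow_id: run.id,
        type: 'final',
        result: { message: 'hello' }
    });
    await workflows.updateStatus({ id: run.id, status: 'completed', metadata: run.metadata });
    const final = await workflows.getFinalResult(run.id); // null until a 'final' row exists
    const recent = await workflows.listRuns({ status: 'completed', limit: 5 });
    console.log(final?.result, recent.length);
}

main().catch(console.error);

Every response is re-parsed with zod before it is returned, so a malformed row now fails loudly at the client boundary instead of propagating half-mapped objects as in 0.1.x.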
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@props-labs/mesh-os",
-  "version": "0.1.23",
+  "version": "0.2.0",
   "description": "MeshOS - A memory system for AI agents",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -48,7 +48,7 @@
     "build": "tsc",
     "start": "ts-node src/main.ts",
     "lint": "eslint . --ext .ts",
-    "test": "vitest
-    "test:watch": "vitest"
+    "test": "node --max_old_space_size=4096 node_modules/vitest/vitest.mjs",
+    "test:watch": "node --max_old_space_size=4096 node_modules/vitest/vitest.mjs --watch"
   }
 }
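
Both test scripts now launch vitest through node with --max_old_space_size=4096, raising the V8 heap ceiling to roughly 4 GB; the 12,355-line sample_embeddings fixture added in this release is the likely reason the default heap no longer suffices.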
@@ -0,0 +1 @@
+[]
package/src/templates/hasura/metadata/databases/default/functions/functions.yaml
ADDED
@@ -0,0 +1,80 @@
+- function:
+    schema: public
+    name: get_connected_memories
+  configuration:
+    custom_name: getConnectedMemories
+    exposed: true
+    arguments:
+      - name: memory_id
+        type: uuid!
+      - name: relationship_type
+        type: String
+      - name: max_depth
+        type: Int
+
+- function:
+    schema: public
+    name: inspect_memory_embeddings
+  configuration:
+    custom_name: inspectMemoryEmbeddings
+    exposed: true
+    column_config:
+      memory_id:
+        custom_name: memoryId
+      embedding_norm:
+        custom_name: embeddingNorm
+      is_normalized:
+        custom_name: isNormalized
+
+- function:
+    schema: public
+    name: search_memory_chunks
+  configuration:
+    custom_name: searchMemoryChunks
+    exposed: true
+    arguments:
+      - name: query_embedding
+        type: vector!
+      - name: match_threshold
+        type: Float!
+      - name: match_count
+        type: Int!
+      - name: filter_agent_id
+        type: uuid
+      - name: memory_metadata_filter
+        type: jsonb
+      - name: chunk_metadata_filter
+        type: jsonb
+      - name: created_at_filter
+        type: jsonb
+    column_config:
+      chunk_id:
+        custom_name: chunkId
+      memory_id:
+        custom_name: memoryId
+      chunk_index:
+        custom_name: chunkIndex
+      chunk_content:
+        custom_name: chunkContent
+      chunk_metadata:
+        custom_name: chunkMetadata
+      chunk_created_at:
+        custom_name: chunkCreatedAt
+      chunk_updated_at:
+        custom_name: chunkUpdatedAt
+      memory_content:
+        custom_name: memoryContent
+      memory_type:
+        custom_name: memoryType
+      memory_status:
+        custom_name: memoryStatus
+      memory_metadata:
+        custom_name: memoryMetadata
+      memory_created_at:
+        custom_name: memoryCreatedAt
+      memory_updated_at:
+        custom_name: memoryUpdatedAt
+      agent_id:
+        custom_name: agentId
+      similarity:
+        custom_name: similarity
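
Because search_memory_chunks is exposed with custom names, it becomes queryable at the GraphQL root as searchMemoryChunks and returns the camelCase columns listed in column_config. A sketch of calling it directly — the endpoint and secret are placeholders, and the args wrapper plus the string-encoded pgvector literal follow Hasura's usual convention for tracked functions rather than anything this metadata confirms:

// Sketch only: root-field and column names come from the metadata above;
// endpoint, secret, args wrapper, and vector encoding are assumptions.
const searchQuery = `
    query SearchChunks {
        searchMemoryChunks(
            args: {
                query_embedding: "[0.1, 0.2, 0.3]",  # placeholder pgvector literal
                match_threshold: 0.7,
                match_count: 5
            }
        ) {
            chunkId
            memoryId
            chunkContent
            memoryContent
            similarity
        }
    }
`;

async function searchChunks(): Promise<void> {
    const response = await fetch('http://localhost:8080/v1/graphql', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-hasura-admin-secret': 'dev-secret' // placeholder
        },
        body: JSON.stringify({ query: searchQuery })
    });
    const { data, errors } = await response.json();
    if (errors) {
        throw new Error(errors[0].message);
    }
    console.log(data.searchMemoryChunks); // rows shaped by column_config above
}

searchChunks().catch(console.error);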