@props-labs/mesh-os 0.1.23 → 0.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/dist/core/__fixtures__/mock_responses.d.ts +337 -0
  2. package/dist/core/__fixtures__/mock_responses.js +355 -0
  3. package/dist/core/__fixtures__/sample_embeddings.d.ts +33 -0
  4. package/dist/core/__fixtures__/sample_embeddings.js +12355 -0
  5. package/dist/core/agents.d.ts +51 -0
  6. package/dist/core/agents.js +170 -0
  7. package/dist/core/client.d.ts +5 -7
  8. package/dist/core/client.js +5 -16
  9. package/dist/core/memories.d.ts +138 -0
  10. package/dist/core/memories.js +417 -0
  11. package/dist/core/workflows.d.ts +88 -25
  12. package/dist/core/workflows.js +247 -133
  13. package/package.json +3 -3
  14. package/src/templates/hasura/metadata/actions.yaml +6 -0
  15. package/src/templates/hasura/metadata/cron_triggers.yaml +1 -0
  16. package/src/templates/hasura/metadata/databases/databases.yaml +1 -1
  17. package/src/templates/hasura/metadata/databases/default/functions/functions.yaml +80 -0
  18. package/src/templates/hasura/metadata/databases/default/tables/tables.yaml +274 -9
  19. package/src/templates/hasura/metadata/query_collections.yaml +1 -0
  20. package/src/templates/hasura/metadata/rest_endpoints.yaml +1 -0
  21. package/src/templates/hasura/migrations/default/0_cleanup/down.sql +2 -0
  22. package/src/templates/hasura/migrations/default/0_cleanup/up.sql +59 -0
  23. package/src/templates/hasura/migrations/default/1_init/down.sql +27 -21
  24. package/src/templates/hasura/migrations/default/1_init/up.sql +446 -174
  25. package/src/templates/hasura/migrations/default/2_sample_data/down.sql +3 -0
  26. package/src/templates/hasura/migrations/default/2_sample_data/up.sql +288 -0
  27. package/src/templates/hasura/migrations/default/3_agent_relations/down.sql +76 -0
  28. package/src/templates/hasura/migrations/default/3_agent_relations/up.sql +469 -0
  29. package/dist/core/entities.d.ts +0 -58
  30. package/dist/core/entities.js +0 -347
  31. package/src/templates/hasura/metadata/config.yaml +0 -1
  32. package/src/templates/hasura/metadata/databases/default/tables/public_agents.yaml +0 -14
  33. package/src/templates/hasura/metadata/databases/default/tables/public_memories.yaml +0 -23
  34. package/src/templates/hasura/metadata/databases/default/tables/public_memory_edges.yaml +0 -57
  35. package/src/templates/hasura/metadata/databases/default/tables/track_tables.yaml +0 -14
  36. package/src/templates/hasura/metadata/metadata.json +0 -80
  37. package/src/templates/hasura/migrations/default/2_metadata_filtering/down.sql +0 -4
  38. package/src/templates/hasura/migrations/default/2_metadata_filtering/up.sql +0 -44
  39. package/src/templates/hasura/migrations/default/3_memory_expiry/down.sql +0 -55
  40. package/src/templates/hasura/migrations/default/3_memory_expiry/up.sql +0 -108
  41. package/src/templates/hasura/migrations/default/4_remove_slug_validation/down.sql +0 -20
  42. package/src/templates/hasura/migrations/default/4_remove_slug_validation/up.sql +0 -5
  43. package/src/templates/hasura/migrations/default/5_entities/down.sql +0 -13
  44. package/src/templates/hasura/migrations/default/5_entities/up.sql +0 -155
@@ -4,240 +4,354 @@ exports.WorkflowManager = void 0;
4
4
  /**
5
5
  * Workflow management functionality for MeshOS.
6
6
  */
7
- const taxonomy_1 = require("./taxonomy");
7
+ const zod_1 = require("zod");
8
+ // Validation schemas
9
+ const workflowSchemaSchema = zod_1.z.object({
10
+ type: zod_1.z.string(),
11
+ input_schema: zod_1.z.record(zod_1.z.any()),
12
+ output_schema: zod_1.z.record(zod_1.z.any()),
13
+ metadata_schema: zod_1.z.record(zod_1.z.any()).nullable().optional(),
14
+ validation_rules: zod_1.z.record(zod_1.z.any()).nullable().optional(),
15
+ behaviors: zod_1.z.record(zod_1.z.any()).nullable().optional(),
16
+ created_at: zod_1.z.string(),
17
+ updated_at: zod_1.z.string()
18
+ });
19
+ const workflowRunSchema = zod_1.z.object({
20
+ id: zod_1.z.string().uuid(),
21
+ type: zod_1.z.string(),
22
+ status: zod_1.z.enum(['pending', 'running', 'completed', 'failed', 'cancelled']),
23
+ input: zod_1.z.record(zod_1.z.any()),
24
+ metadata: zod_1.z.record(zod_1.z.any()),
25
+ created_at: zod_1.z.string(),
26
+ updated_at: zod_1.z.string(),
27
+ agent_id: zod_1.z.string().uuid().nullable().optional()
28
+ });
29
+ const workflowResultSchema = zod_1.z.object({
30
+ id: zod_1.z.string().uuid(),
31
+ workflow_id: zod_1.z.string().uuid(),
32
+ type: zod_1.z.enum(['interim', 'final']),
33
+ result: zod_1.z.record(zod_1.z.any()),
34
+ metadata: zod_1.z.record(zod_1.z.any()),
35
+ created_at: zod_1.z.string(),
36
+ updated_at: zod_1.z.string(),
37
+ agent_id: zod_1.z.string().uuid().nullable().optional()
38
+ });
8
39
  class WorkflowManager {
9
40
  constructor(url, headers) {
10
41
  this.url = url;
11
42
  this.headers = headers;
12
43
  }
13
44
  /**
14
- * Execute a GraphQL query.
45
+ * Execute a GraphQL query against Hasura
15
46
  */
16
47
  async executeQuery(query, variables) {
17
48
  const response = await fetch(this.url, {
18
49
  method: 'POST',
19
- headers: this.headers,
50
+ headers: {
51
+ 'Content-Type': 'application/json',
52
+ ...this.headers
53
+ },
20
54
  body: JSON.stringify({
21
55
  query,
22
- variables: variables || {}
56
+ variables
23
57
  })
24
58
  });
25
59
  if (!response.ok) {
26
- throw new Error(`HTTP error! status: ${response.status}`);
60
+ throw new Error(`Failed to execute query: ${response.statusText}`);
27
61
  }
28
- const result = await response.json();
62
+ const result = (await response.json());
29
63
  if (result.errors) {
30
- throw new Error(result.errors[0].message);
64
+ throw new Error(`GraphQL error: ${result.errors[0].message}`);
31
65
  }
32
66
  return result.data;
33
67
  }
34
68
  /**
35
- * Get a workflow by job ID.
69
+ * List available workflow schemas
36
70
  */
37
- async getByJobId(jobId) {
71
+ async listSchemas() {
38
72
  const query = `
39
- query GetWorkflowByJobId($jobId: String!) {
40
- workflows(where: {job_id: {_eq: $jobId}}, limit: 1) {
73
+ query ListWorkflowSchemas {
74
+ workflow_schemas {
75
+ type
76
+ input_schema
77
+ output_schema
78
+ metadata_schema
79
+ validation_rules
80
+ behaviors
81
+ created_at
82
+ updated_at
83
+ }
84
+ }
85
+ `;
86
+ const result = await this.executeQuery(query, {});
87
+ return result.workflow_schemas.map(schema => workflowSchemaSchema.parse(schema));
88
+ }
89
+ /**
90
+ * Get a workflow schema by type
91
+ */
92
+ async getSchema(type) {
93
+ const query = `
94
+ query GetWorkflowSchema($type: String!) {
95
+ workflow_schemas_by_pk(type: $type) {
96
+ type
97
+ input_schema
98
+ output_schema
99
+ metadata_schema
100
+ validation_rules
101
+ behaviors
102
+ created_at
103
+ updated_at
104
+ }
105
+ }
106
+ `;
107
+ const result = await this.executeQuery(query, { type });
108
+ const schema = result.workflow_schemas_by_pk;
109
+ if (!schema) {
110
+ throw new Error(`Workflow schema not found for type: ${type}`);
111
+ }
112
+ return workflowSchemaSchema.parse(schema);
113
+ }
114
+ /**
115
+ * Create a new workflow run
116
+ */
117
+ async createRun(input) {
118
+ // First get the workflow schema to validate input
119
+ const schema = await this.getSchema(input.type);
120
+ const query = `
121
+ mutation CreateWorkflowRun($run: workflow_runs_insert_input!) {
122
+ insert_workflow_runs_one(object: $run) {
41
123
  id
42
- job_id
43
124
  type
44
125
  status
45
- result
126
+ input
46
127
  metadata
47
128
  created_at
48
129
  updated_at
130
+ agent_id
49
131
  }
50
132
  }
51
133
  `;
52
- const result = await this.executeQuery(query, { jobId });
53
- if (!result.workflows[0]) {
54
- return null;
55
- }
56
- // Convert snake_case to camelCase
57
- const { job_id, created_at, updated_at, ...rest } = result.workflows[0];
58
- return {
59
- ...rest,
60
- jobId: job_id,
61
- createdAt: created_at,
62
- updatedAt: updated_at,
63
- };
134
+ const result = await this.executeQuery(query, {
135
+ run: {
136
+ type: input.type,
137
+ input: input.input,
138
+ metadata: input.metadata || {},
139
+ status: 'pending',
140
+ agent_id: input.agent_id
141
+ }
142
+ });
143
+ return workflowRunSchema.parse(result.insert_workflow_runs_one);
64
144
  }
65
145
  /**
66
- * Create a new workflow.
146
+ * Update a workflow run's status
67
147
  */
68
- async create(workflow) {
69
- // Validate workflow data
70
- const validatedData = taxonomy_1.workflowSchema.omit({ id: true, createdAt: true, updatedAt: true }).parse(workflow);
148
+ async updateStatus(input) {
71
149
  const query = `
72
- mutation CreateWorkflow($workflow: workflows_insert_input!) {
73
- insert_workflows_one(object: $workflow) {
150
+ mutation UpdateWorkflowStatus($id: uuid!, $status: String!, $metadata: jsonb) {
151
+ update_workflow_runs_by_pk(
152
+ pk_columns: {id: $id},
153
+ _set: {
154
+ status: $status,
155
+ metadata: $metadata
156
+ }
157
+ ) {
74
158
  id
75
- job_id
76
159
  type
77
160
  status
161
+ input
162
+ metadata
163
+ created_at
164
+ updated_at
165
+ agent_id
166
+ }
167
+ }
168
+ `;
169
+ const result = await this.executeQuery(query, {
170
+ id: input.id,
171
+ status: input.status,
172
+ metadata: input.metadata
173
+ });
174
+ return workflowRunSchema.parse(result.update_workflow_runs_by_pk);
175
+ }
176
+ /**
177
+ * Create a workflow result
178
+ */
179
+ async createResult(input) {
180
+ const query = `
181
+ mutation CreateWorkflowResult($result: workflow_results_insert_input!) {
182
+ insert_workflow_results_one(object: $result) {
183
+ id
184
+ workflow_id
185
+ type
78
186
  result
79
187
  metadata
80
188
  created_at
81
189
  updated_at
190
+ agent_id
82
191
  }
83
192
  }
84
193
  `;
85
194
  const result = await this.executeQuery(query, {
86
- workflow: {
87
- job_id: validatedData.jobId,
88
- type: validatedData.type,
89
- status: validatedData.status,
90
- result: validatedData.result,
91
- metadata: validatedData.metadata
195
+ result: {
196
+ workflow_id: input.workflow_id,
197
+ type: input.type,
198
+ result: input.result,
199
+ metadata: input.metadata || {},
200
+ agent_id: input.agent_id
92
201
  }
93
202
  });
94
- // Convert snake_case to camelCase
95
- const { job_id, created_at, updated_at, ...rest } = result.insert_workflows_one;
96
- return {
97
- ...rest,
98
- jobId: job_id,
99
- createdAt: created_at,
100
- updatedAt: updated_at,
101
- };
203
+ return workflowResultSchema.parse(result.insert_workflow_results_one);
102
204
  }
103
205
  /**
104
- * Get a workflow by ID.
206
+ * Get a workflow run by ID
105
207
  */
106
- async get(id) {
208
+ async getRun(id) {
107
209
  const query = `
108
- query GetWorkflow($id: uuid!) {
109
- workflows_by_pk(id: $id) {
210
+ query GetWorkflowRun($id: uuid!) {
211
+ workflow_runs_by_pk(id: $id) {
110
212
  id
111
- job_id
112
213
  type
113
214
  status
114
- result
215
+ input
115
216
  metadata
116
217
  created_at
117
218
  updated_at
219
+ agent_id
118
220
  }
119
221
  }
120
222
  `;
121
223
  const result = await this.executeQuery(query, { id });
122
- if (!result.workflows_by_pk) {
123
- return null;
224
+ const run = result.workflow_runs_by_pk;
225
+ if (!run) {
226
+ throw new Error(`Workflow run not found with id: ${id}`);
124
227
  }
125
- // Convert snake_case to camelCase
126
- const { job_id, created_at, updated_at, ...rest } = result.workflows_by_pk;
127
- return {
128
- ...rest,
129
- jobId: job_id,
130
- createdAt: created_at,
131
- updatedAt: updated_at,
132
- };
228
+ return workflowRunSchema.parse(run);
133
229
  }
134
230
  /**
135
- * Get workflows with optional filtering and sorting.
231
+ * Get all results for a workflow run
136
232
  */
137
- async list(options = {}) {
138
- const { where, orderBy = [{ column: 'createdAt', order: 'desc' }], limit, offset } = options;
139
- // Convert orderBy to Hasura format
140
- const hasuraOrderBy = orderBy.map(({ column, order }) => {
141
- // Convert camelCase to snake_case for column names
142
- const snakeCaseColumn = column.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
143
- return { [snakeCaseColumn]: order };
144
- });
233
+ async getResults(workflowId) {
145
234
  const query = `
146
- query GetWorkflows(
147
- $where: workflows_bool_exp,
148
- $orderBy: [workflows_order_by!],
149
- $limit: Int,
150
- $offset: Int
151
- ) {
152
- workflows(
153
- where: $where,
154
- order_by: $orderBy,
155
- limit: $limit,
156
- offset: $offset
235
+ query GetWorkflowResults($workflow_id: uuid!) {
236
+ workflow_results(
237
+ where: {workflow_id: {_eq: $workflow_id}}
238
+ order_by: {created_at: asc}
157
239
  ) {
158
240
  id
159
- job_id
241
+ workflow_id
160
242
  type
161
- status
162
243
  result
163
244
  metadata
164
245
  created_at
165
246
  updated_at
247
+ agent_id
166
248
  }
167
249
  }
168
250
  `;
169
- const result = await this.executeQuery(query, {
170
- where,
171
- orderBy: hasuraOrderBy,
172
- limit,
173
- offset
174
- });
175
- return result.workflows.map(workflow => {
176
- const { job_id, created_at, updated_at, ...rest } = workflow;
177
- return {
178
- ...rest,
179
- jobId: job_id,
180
- createdAt: created_at,
181
- updatedAt: updated_at,
182
- };
183
- });
251
+ const result = await this.executeQuery(query, { workflow_id: workflowId });
252
+ return result.workflow_results.map(r => workflowResultSchema.parse(r));
184
253
  }
185
254
  /**
186
- * Update a workflow.
255
+ * Get the final result for a workflow run
187
256
  */
188
- async update(id, updates) {
257
+ async getFinalResult(workflowId) {
189
258
  const query = `
190
- mutation UpdateWorkflow($id: uuid!, $updates: workflows_set_input!) {
191
- update_workflows_by_pk(pk_columns: {id: $id}, _set: $updates) {
259
+ query GetWorkflowFinalResult($workflow_id: uuid!) {
260
+ workflow_results(
261
+ where: {
262
+ workflow_id: {_eq: $workflow_id},
263
+ type: {_eq: "final"}
264
+ }
265
+ limit: 1
266
+ order_by: {created_at: desc}
267
+ ) {
192
268
  id
193
- job_id
269
+ workflow_id
194
270
  type
195
- status
196
271
  result
197
272
  metadata
198
273
  created_at
199
274
  updated_at
275
+ agent_id
276
+ }
277
+ }
278
+ `;
279
+ const result = await this.executeQuery(query, { workflow_id: workflowId });
280
+ return result.workflow_results[0] ? workflowResultSchema.parse(result.workflow_results[0]) : null;
281
+ }
282
+ /**
283
+ * List workflow runs with optional filtering and sorting
284
+ */
285
+ async listRuns(options = {}) {
286
+ const { type, status, agent_id, limit = 10, offset = 0, order_by = [{ field: 'created_at', direction: 'desc' }] } = options;
287
+ // Build where clause
288
+ const where = {};
289
+ if (type)
290
+ where.type = { _eq: type };
291
+ if (status)
292
+ where.status = { _eq: status };
293
+ if (agent_id)
294
+ where.agent_id = { _eq: agent_id };
295
+ // Convert order_by to Hasura format
296
+ const orderBy = order_by.map(({ field, direction }) => ({
297
+ [field]: direction
298
+ }));
299
+ const query = `
300
+ query ListWorkflowRuns(
301
+ $where: workflow_runs_bool_exp!,
302
+ $limit: Int!,
303
+ $offset: Int!,
304
+ $order_by: [workflow_runs_order_by!]!
305
+ ) {
306
+ workflow_runs(
307
+ where: $where,
308
+ limit: $limit,
309
+ offset: $offset,
310
+ order_by: $order_by
311
+ ) {
312
+ id
313
+ type
314
+ status
315
+ input
316
+ metadata
317
+ created_at
318
+ updated_at
319
+ agent_id
200
320
  }
201
321
  }
202
322
  `;
203
- // Convert camelCase to snake_case for update fields
204
- const updateData = {};
205
- if (updates.jobId !== undefined)
206
- updateData.job_id = updates.jobId;
207
- if (updates.type !== undefined)
208
- updateData.type = updates.type;
209
- if (updates.status !== undefined)
210
- updateData.status = updates.status;
211
- if (updates.result !== undefined)
212
- updateData.result = updates.result;
213
- if (updates.metadata !== undefined)
214
- updateData.metadata = updates.metadata;
215
323
  const result = await this.executeQuery(query, {
216
- id,
217
- updates: updateData
324
+ where,
325
+ limit,
326
+ offset,
327
+ order_by: orderBy
218
328
  });
219
- // Convert snake_case to camelCase
220
- const { job_id, created_at, updated_at, ...rest } = result.update_workflows_by_pk;
221
- return {
222
- ...rest,
223
- jobId: job_id,
224
- createdAt: created_at,
225
- updatedAt: updated_at,
226
- };
329
+ return result.workflow_runs.map(run => workflowRunSchema.parse(run));
227
330
  }
228
331
  /**
229
- * Delete a workflow.
332
+ * Get the last result for a workflow run, regardless of type
230
333
  */
231
- async delete(id) {
334
+ async getLastResult(workflowId) {
232
335
  const query = `
233
- mutation DeleteWorkflow($id: uuid!) {
234
- delete_workflows_by_pk(id: $id) {
336
+ query GetLastWorkflowResult($workflow_id: uuid!) {
337
+ workflow_results(
338
+ where: {workflow_id: {_eq: $workflow_id}}
339
+ order_by: {created_at: desc}
340
+ limit: 1
341
+ ) {
235
342
  id
343
+ workflow_id
344
+ type
345
+ result
346
+ metadata
347
+ created_at
348
+ updated_at
349
+ agent_id
236
350
  }
237
351
  }
238
352
  `;
239
- const result = await this.executeQuery(query, { id });
240
- return result.delete_workflows_by_pk !== null;
353
+ const result = await this.executeQuery(query, { workflow_id: workflowId });
354
+ return result.workflow_results[0] ? workflowResultSchema.parse(result.workflow_results[0]) : null;
241
355
  }
242
356
  }
243
357
  exports.WorkflowManager = WorkflowManager;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@props-labs/mesh-os",
3
- "version": "0.1.23",
3
+ "version": "0.2.2",
4
4
  "description": "MeshOS - A memory system for AI agents",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -48,7 +48,7 @@
48
48
  "build": "tsc",
49
49
  "start": "ts-node src/main.ts",
50
50
  "lint": "eslint . --ext .ts",
51
- "test": "vitest run",
52
- "test:watch": "vitest"
51
+ "test": "node --max_old_space_size=4096 node_modules/vitest/vitest.mjs",
52
+ "test:watch": "node --max_old_space_size=4096 node_modules/vitest/vitest.mjs --watch"
53
53
  }
54
54
  }
@@ -0,0 +1,6 @@
1
+ actions: []
2
+ custom_types:
3
+ enums: []
4
+ input_objects: []
5
+ objects: []
6
+ scalars: []
@@ -12,4 +12,4 @@
12
12
  retries: 1
13
13
  use_prepared_statements: true
14
14
  tables: "!include default/tables/tables.yaml"
15
- functions: []
15
+ functions: "!include default/functions/functions.yaml"
@@ -0,0 +1,80 @@
1
+ - function:
2
+ schema: public
3
+ name: get_connected_memories
4
+ configuration:
5
+ custom_name: getConnectedMemories
6
+ exposed: true
7
+ arguments:
8
+ - name: memory_id
9
+ type: uuid!
10
+ - name: relationship_type
11
+ type: String
12
+ - name: max_depth
13
+ type: Int
14
+
15
+ - function:
16
+ schema: public
17
+ name: inspect_memory_embeddings
18
+ configuration:
19
+ custom_name: inspectMemoryEmbeddings
20
+ exposed: true
21
+ column_config:
22
+ memory_id:
23
+ custom_name: memoryId
24
+ embedding_norm:
25
+ custom_name: embeddingNorm
26
+ is_normalized:
27
+ custom_name: isNormalized
28
+
29
+ - function:
30
+ schema: public
31
+ name: search_memory_chunks
32
+ configuration:
33
+ custom_name: searchMemoryChunks
34
+ exposed: true
35
+ arguments:
36
+ - name: query_embedding
37
+ type: vector!
38
+ - name: match_threshold
39
+ type: Float!
40
+ - name: match_count
41
+ type: Int!
42
+ - name: filter_agent_id
43
+ type: uuid
44
+ - name: memory_metadata_filter
45
+ type: jsonb
46
+ - name: chunk_metadata_filter
47
+ type: jsonb
48
+ - name: created_at_filter
49
+ type: jsonb
50
+ column_config:
51
+ chunk_id:
52
+ custom_name: chunkId
53
+ memory_id:
54
+ custom_name: memoryId
55
+ chunk_index:
56
+ custom_name: chunkIndex
57
+ chunk_content:
58
+ custom_name: chunkContent
59
+ chunk_metadata:
60
+ custom_name: chunkMetadata
61
+ chunk_created_at:
62
+ custom_name: chunkCreatedAt
63
+ chunk_updated_at:
64
+ custom_name: chunkUpdatedAt
65
+ memory_content:
66
+ custom_name: memoryContent
67
+ memory_type:
68
+ custom_name: memoryType
69
+ memory_status:
70
+ custom_name: memoryStatus
71
+ memory_metadata:
72
+ custom_name: memoryMetadata
73
+ memory_created_at:
74
+ custom_name: memoryCreatedAt
75
+ memory_updated_at:
76
+ custom_name: memoryUpdatedAt
77
+ agent_id:
78
+ custom_name: agentId
79
+ similarity:
80
+ custom_name: similarity