@props-labs/mesh-os 0.1.23 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/core/__fixtures__/mock_responses.d.ts +318 -0
  2. package/dist/core/__fixtures__/mock_responses.js +333 -0
  3. package/dist/core/__fixtures__/sample_embeddings.d.ts +33 -0
  4. package/dist/core/__fixtures__/sample_embeddings.js +12355 -0
  5. package/dist/core/agents.d.ts +51 -0
  6. package/dist/core/agents.js +170 -0
  7. package/dist/core/memories.d.ts +138 -0
  8. package/dist/core/memories.js +417 -0
  9. package/dist/core/workflows.d.ts +84 -25
  10. package/dist/core/workflows.js +224 -135
  11. package/package.json +3 -3
  12. package/src/templates/hasura/metadata/actions.yaml +6 -0
  13. package/src/templates/hasura/metadata/cron_triggers.yaml +1 -0
  14. package/src/templates/hasura/metadata/databases/databases.yaml +1 -1
  15. package/src/templates/hasura/metadata/databases/default/functions/functions.yaml +80 -0
  16. package/src/templates/hasura/metadata/databases/default/tables/tables.yaml +274 -9
  17. package/src/templates/hasura/metadata/query_collections.yaml +1 -0
  18. package/src/templates/hasura/metadata/rest_endpoints.yaml +1 -0
  19. package/src/templates/hasura/migrations/default/0_cleanup/down.sql +2 -0
  20. package/src/templates/hasura/migrations/default/0_cleanup/up.sql +59 -0
  21. package/src/templates/hasura/migrations/default/1_init/down.sql +27 -21
  22. package/src/templates/hasura/migrations/default/1_init/up.sql +446 -174
  23. package/src/templates/hasura/migrations/default/2_sample_data/down.sql +3 -0
  24. package/src/templates/hasura/migrations/default/2_sample_data/up.sql +288 -0
  25. package/src/templates/hasura/migrations/default/3_agent_relations/down.sql +76 -0
  26. package/src/templates/hasura/migrations/default/3_agent_relations/up.sql +469 -0
  27. package/src/templates/hasura/metadata/config.yaml +0 -1
  28. package/src/templates/hasura/metadata/databases/default/tables/public_agents.yaml +0 -14
  29. package/src/templates/hasura/metadata/databases/default/tables/public_memories.yaml +0 -23
  30. package/src/templates/hasura/metadata/databases/default/tables/public_memory_edges.yaml +0 -57
  31. package/src/templates/hasura/metadata/databases/default/tables/track_tables.yaml +0 -14
  32. package/src/templates/hasura/metadata/metadata.json +0 -80
  33. package/src/templates/hasura/migrations/default/2_metadata_filtering/down.sql +0 -4
  34. package/src/templates/hasura/migrations/default/2_metadata_filtering/up.sql +0 -44
  35. package/src/templates/hasura/migrations/default/3_memory_expiry/down.sql +0 -55
  36. package/src/templates/hasura/migrations/default/3_memory_expiry/up.sql +0 -108
  37. package/src/templates/hasura/migrations/default/4_remove_slug_validation/down.sql +0 -20
  38. package/src/templates/hasura/migrations/default/4_remove_slug_validation/up.sql +0 -5
  39. package/src/templates/hasura/migrations/default/5_entities/down.sql +0 -13
  40. package/src/templates/hasura/migrations/default/5_entities/up.sql +0 -155
package/src/templates/hasura/migrations/default/2_sample_data/up.sql
@@ -0,0 +1,288 @@
+ -- Insert sample type schemas
+ INSERT INTO type_schemas (type, schema, metadata_schema, embedding_config, chunking_config) VALUES
+ ('text_document',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'title', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The title or heading of the text document'
+ ),
+ 'content', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The main content or body of the text document'
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'source', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The origin or source of the document'
+ ),
+ 'tags', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'List of tags or categories for the document',
+ 'items', jsonb_build_object('type', 'string')
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'model', 'text-embedding-3-small',
+ 'dimensions', 1536
+ ),
+ jsonb_build_object(
+ 'chunk_size', 1000,
+ 'chunk_overlap', 100
+ )
+ ),
+ ('code_snippet',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'language', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The programming language of the code snippet'
+ ),
+ 'code', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The actual code content'
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'repository', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The source repository containing the code'
+ ),
+ 'path', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The file path within the repository'
+ ),
+ 'tags', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'List of tags or categories for the code snippet',
+ 'items', jsonb_build_object('type', 'string')
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'model', 'text-embedding-3-small',
+ 'dimensions', 1536
+ ),
+ jsonb_build_object(
+ 'chunk_size', 500,
+ 'chunk_overlap', 50
+ )
+ ),
+ ('thought',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'content', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The main thought or idea content'
+ ),
+ 'links', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'Optional list of related links or references',
+ 'items', jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'url', jsonb_build_object('type', 'string'),
+ 'title', jsonb_build_object('type', 'string'),
+ 'description', jsonb_build_object('type', 'string')
+ )
+ )
+ )
+ ),
+ 'required', array['content']
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'context', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The context in which the thought occurred'
+ ),
+ 'tags', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'List of tags or categories for the thought',
+ 'items', jsonb_build_object('type', 'string')
+ ),
+ 'confidence', jsonb_build_object(
+ 'type', 'number',
+ 'description', 'Confidence level in the thought (0-1)',
+ 'minimum', 0,
+ 'maximum', 1
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'model', 'text-embedding-3-small',
+ 'dimensions', 1536
+ ),
+ jsonb_build_object(
+ 'chunk_size', 300,
+ 'chunk_overlap', 30
+ )
+ );
+
+ -- Insert sample workflow schemas
+ INSERT INTO workflow_schemas (type, input_schema, output_schema, metadata_schema) VALUES
+ ('document_analysis',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'document_id', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The unique identifier of the document to analyze'
+ ),
+ 'analysis_type', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The type of analysis to perform on the document',
+ 'enum', array['sentiment', 'summary', 'key_points']
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'result', jsonb_build_object(
+ 'type', 'object',
+ 'description', 'The analysis results for the document'
+ ),
+ 'confidence', jsonb_build_object(
+ 'type', 'number',
+ 'description', 'Confidence score of the analysis result'
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'model_version', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'Version of the model used for analysis'
+ ),
+ 'processing_time', jsonb_build_object(
+ 'type', 'number',
+ 'description', 'Time taken to process the document in seconds'
+ )
+ )
+ )
+ ),
+ ('code_review',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'code_id', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The unique identifier of the code to review'
+ ),
+ 'review_type', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The type of code review to perform',
+ 'enum', array['security', 'performance', 'style']
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'issues', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'List of identified issues in the code'
+ ),
+ 'suggestions', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'List of improvement suggestions for the code'
+ )
+ )
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'reviewer_version', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'Version of the code review system used'
+ ),
+ 'review_duration', jsonb_build_object(
+ 'type', 'number',
+ 'description', 'Time taken to complete the code review in seconds'
+ )
+ )
+ )
+ ),
+ ('twitter_thread_generator',
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'content', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The main content to generate the thread from'
+ ),
+ 'links', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'Optional list of reference links to include',
+ 'items', jsonb_build_object('type', 'string')
+ ),
+ 'use_deep_research', jsonb_build_object(
+ 'type', 'boolean',
+ 'description', 'Whether to perform deep research for additional context'
+ )
+ ),
+ 'required', array['content', 'use_deep_research']
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'posts', jsonb_build_object(
+ 'type', 'array',
+ 'description', 'Array of posts making up the thread',
+ 'items', jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'text', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'The text content of the post'
+ ),
+ 'media_url', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'Optional URL to media attachment'
+ ),
+ 'media_type', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'Type of media (image, video, gif)',
+ 'enum', array['image', 'video', 'gif']
+ )
+ ),
+ 'required', array['text']
+ )
+ )
+ ),
+ 'required', array['posts']
+ ),
+ jsonb_build_object(
+ 'type', 'object',
+ 'properties', jsonb_build_object(
+ 'generation_model', jsonb_build_object(
+ 'type', 'string',
+ 'description', 'Model used for thread generation'
+ ),
+ 'research_depth', jsonb_build_object(
+ 'type', 'integer',
+ 'description', 'Depth of research performed (1-5)',
+ 'minimum', 1,
+ 'maximum', 5
+ ),
+ 'processing_time', jsonb_build_object(
+ 'type', 'number',
+ 'description', 'Time taken to generate the thread in seconds'
+ )
+ )
+ )
+ );
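
For quick verification after this migration runs, here is a minimal query sketch that reads back each seeded type's embedding model and chunking settings. It assumes only the type_schemas columns used in the INSERT above.

-- Read back the per-type embedding and chunking configuration seeded above.
SELECT
    type,
    embedding_config->>'model'        AS embedding_model,
    chunking_config->>'chunk_size'    AS chunk_size,
    chunking_config->>'chunk_overlap' AS chunk_overlap
FROM type_schemas
WHERE type IN ('text_document', 'code_snippet', 'thought');
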
package/src/templates/hasura/migrations/default/3_agent_relations/down.sql
@@ -0,0 +1,76 @@
+ -- Remove sample data in reverse order
+ DELETE FROM memory_edges WHERE agent_id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+ DELETE FROM workflow_results WHERE agent_id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+ DELETE FROM workflow_runs WHERE agent_id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+ DELETE FROM memory_chunks WHERE agent_id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+ DELETE FROM memories WHERE agent_id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+ DELETE FROM agents WHERE id = 'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62';
+
+ -- Drop views that depend on memories table
+ DROP VIEW IF EXISTS memory_chunks_with_details CASCADE;
+ DROP VIEW IF EXISTS memory_search_results CASCADE;
+
+ -- Drop function
+ DROP FUNCTION IF EXISTS search_memory_chunks(vector, double precision, integer, uuid, jsonb, jsonb, jsonb);
+
+ -- Drop indexes
+ DROP INDEX IF EXISTS idx_memories_agent_id;
+ DROP INDEX IF EXISTS idx_memory_chunks_agent_id;
+ DROP INDEX IF EXISTS idx_memory_edges_agent_id;
+ DROP INDEX IF EXISTS idx_workflow_runs_agent_id;
+ DROP INDEX IF EXISTS idx_workflow_results_agent_id;
+
+ -- Drop agent_id columns
+ ALTER TABLE workflow_results DROP COLUMN IF EXISTS agent_id;
+ ALTER TABLE workflow_runs DROP COLUMN IF EXISTS agent_id;
+ ALTER TABLE memory_edges DROP COLUMN IF EXISTS agent_id;
+ ALTER TABLE memory_chunks DROP COLUMN IF EXISTS agent_id;
+ ALTER TABLE memories DROP COLUMN IF EXISTS agent_id;
+
+ -- Drop trigger
+ DROP TRIGGER IF EXISTS update_agents_updated_at ON agents;
+
+ -- Drop agents table
+ DROP TABLE IF EXISTS agents CASCADE;
+
+ -- Recreate the view without agent_id
+ CREATE OR REPLACE VIEW memory_chunks_with_details AS
+ SELECT
+ mc.id as chunk_id,
+ mc.memory_id,
+ mc.chunk_index,
+ mc.content as chunk_content,
+ mc.embedding,
+ mc.metadata as chunk_metadata,
+ mc.created_at as chunk_created_at,
+ mc.updated_at as chunk_updated_at,
+ m.content as memory_content,
+ m.type as memory_type,
+ m.status as memory_status,
+ m.metadata as memory_metadata,
+ m.created_at as memory_created_at,
+ m.updated_at as memory_updated_at,
+ NULL::uuid as agent_id
+ FROM memory_chunks mc
+ JOIN memories m ON mc.memory_id = m.id;
+
+ -- Recreate the search results view
+ CREATE OR REPLACE VIEW memory_search_results AS
+ SELECT
+ chunk_id,
+ memory_id,
+ chunk_index,
+ chunk_content,
+ chunk_metadata,
+ chunk_created_at,
+ chunk_updated_at,
+ memory_content,
+ memory_type,
+ memory_status,
+ memory_metadata,
+ memory_created_at,
+ memory_updated_at,
+ agent_id,
+ 0.0::float as similarity
+ FROM memory_chunks_with_details
+ WHERE false;
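
The search_memory_chunks(vector, double precision, integer, uuid, jsonb, jsonb, jsonb) function dropped above is presumably (re)created by the companion 3_agent_relations/up.sql. Below is a hedged call sketch only; the argument meanings (query embedding, similarity threshold, result limit, agent filter, three jsonb metadata filters) are assumptions inferred from the memory_search_results view and are not confirmed by this diff, so check the up migration for the authoritative signature.

-- Assumed call shape only; argument order and semantics are inferred, not confirmed.
SELECT *
FROM search_memory_chunks(
    (SELECT embedding FROM memory_chunks LIMIT 1),   -- a query embedding (vector)
    0.7::double precision,                           -- assumed: minimum similarity
    10,                                              -- assumed: maximum results
    'd7f3668d-5ebf-4f95-9b5c-07301f5d4c62'::uuid,    -- assumed: agent filter (the sample agent above)
    NULL::jsonb, NULL::jsonb, NULL::jsonb            -- assumed: optional metadata filters
);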