memra 0.2.15-py3-none-any.whl → 0.2.16-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
memra/__init__.py CHANGED
@@ -6,7 +6,7 @@ Think of it as "Kubernetes for business logic" where agents are the pods and
  departments are the deployments.
  """

- __version__ = "0.2.12"
+ __version__ = "0.2.15"

  # Core imports
  from .models import Agent, Department, Tool, LLM
memra/cli.py CHANGED
@@ -24,9 +24,6 @@ def run_demo():
  print("🔧 Configuring environment...")
  setup_environment()

- # Step 2.5: Install dependencies
- install_dependencies()
-
  # Step 3: Start Docker containers
  print("🐳 Starting Docker services...")
  if not start_docker_services(demo_dir):
@@ -37,18 +34,11 @@ def run_demo():
  print("⏳ Waiting for services to be ready...")
  wait_for_services()

- # Step 5: Start MCP bridge server
- print("🔌 Starting MCP bridge server...")
- if not start_mcp_bridge_server(demo_dir):
- print("❌ Failed to start MCP bridge server.")
- print(" You can start it manually: cd memra-ops && python mcp_bridge_server.py")
- return False
-
- # Step 6: Run the demo
+ # Step 5: Run the demo
  print("🎯 Running ETL workflow...")
  success = run_etl_workflow(demo_dir)

- # Step 7: Show results
+ # Step 6: Show results
  if success:
  print("=" * 50)
  print("🎉 Demo completed successfully!")
@@ -60,7 +50,6 @@ def run_demo():
  print(" • Check database: docker exec -it memra_postgres psql -U postgres -d local_workflow")
  print(" • View data: SELECT * FROM invoices ORDER BY created_at DESC;")
  print(" • Stop services: cd memra-ops && docker compose down")
- print(" • Stop MCP server: pkill -f mcp_bridge_server.py")
  print(" • Explore code: Check the extracted files in the demo directory")
  else:
  print("❌ Demo failed. Check the logs above for details.")
@@ -87,217 +76,15 @@ def setup_demo_environment():
  def extract_bundled_files(demo_dir):
  """Extract files bundled with the PyPI package"""
  try:
- import pkg_resources
- import shutil
- from pathlib import Path
-
- # Extract demo files from package data
- demo_dir.mkdir(exist_ok=True)
-
- # Copy the main ETL demo script
- try:
- demo_script = pkg_resources.resource_filename('memra', 'demos/etl_invoice_processing/etl_invoice_demo.py')
- if Path(demo_script).exists():
- shutil.copy2(demo_script, demo_dir / "etl_invoice_demo.py")
- print("✅ Copied ETL demo script")
- else:
- print("⚠️ ETL demo script not found in package")
- except Exception as e:
- print(f"⚠️ Could not copy ETL demo script: {e}")
-
- # Copy supporting Python files
- demo_files = [
- "database_monitor_agent.py",
- "simple_pdf_processor.py",
- "setup_demo_data.py"
- ]
-
- for file_name in demo_files:
- try:
- file_path = pkg_resources.resource_filename('memra', f'demos/etl_invoice_processing/{file_name}')
- if Path(file_path).exists():
- shutil.copy2(file_path, demo_dir / file_name)
- print(f"✅ Copied {file_name}")
- else:
- print(f"⚠️ {file_name} not found in package")
- except Exception as e:
- print(f"⚠️ Could not copy {file_name}: {e}")
-
- # Copy sample data directory
- try:
- data_source = pkg_resources.resource_filename('memra', 'demos/etl_invoice_processing/data')
- if Path(data_source).exists():
- data_dir = demo_dir / "data"
- shutil.copytree(data_source, data_dir, dirs_exist_ok=True)
- print("✅ Copied sample invoice data")
+ # Extract from package data
+ with pkg_resources.path('memra', 'demo_files') as demo_files_path:
+ if demo_files_path.exists():
+ # Copy all files from the bundled demo_files directory
+ shutil.copytree(demo_files_path, demo_dir, dirs_exist_ok=True)
  else:
- print("⚠️ Sample data not found in package")
- except Exception as e:
- print(f"⚠️ Could not copy sample data: {e}")
-
- # Create memra-ops directory with docker-compose
- ops_dir = demo_dir / "memra-ops"
- ops_dir.mkdir(exist_ok=True)
-
- # Create basic docker-compose.yml
- compose_content = """version: '3.8'
- services:
- postgres:
- image: postgres:15
- environment:
- POSTGRES_DB: local_workflow
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: postgres
- ports:
- - "5432:5432"
- volumes:
- - postgres_data:/var/lib/postgresql/data
-
- volumes:
- postgres_data:
- """
-
- with open(ops_dir / "docker-compose.yml", "w") as f:
- f.write(compose_content)
-
- # Create basic MCP bridge server
- mcp_content = """#!/usr/bin/env python3
- import asyncio
- import aiohttp
- from aiohttp import web
- import json
- import psycopg2
- import os
-
- class MCPBridgeServer:
- def __init__(self):
- self.db_url = os.getenv('DATABASE_URL', 'postgresql://postgres:postgres@localhost:5432/local_workflow')
-
- async def health_handler(self, request):
- return web.json_response({"status": "healthy", "server": "MCP Bridge"})
-
- async def execute_tool_handler(self, request):
- try:
- data = await request.json()
- tool_name = data.get('tool_name', 'unknown')
- tool_params = data.get('parameters', {})
-
- if tool_name == 'SQLExecutor':
- return await self.execute_sql(tool_params)
- elif tool_name == 'PostgresInsert':
- return await self.insert_data(tool_params)
- elif tool_name == 'DataValidator':
- return await self.validate_data(tool_params)
- else:
- return web.json_response({
- "success": True,
- "message": f"Demo {tool_name} executed",
- "data": {"demo": True}
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": str(e)
- }, status=500)
-
- async def execute_sql(self, params):
- try:
- query = params.get('query', 'SELECT 1')
- conn = psycopg2.connect(self.db_url)
- cursor = conn.cursor()
- cursor.execute(query)
- results = cursor.fetchall()
- cursor.close()
- conn.close()
-
- return web.json_response({
- "success": True,
- "results": results,
- "query": query
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"SQL execution failed: {str(e)}"
- }, status=500)
-
- async def insert_data(self, params):
- try:
- table_name = params.get('table_name', 'invoices')
- data = params.get('data', {})
-
- conn = psycopg2.connect(self.db_url)
- cursor = conn.cursor()
-
- # Simple insert logic
- columns = list(data.keys())
- values = list(data.values())
- placeholders = ', '.join(['%s'] * len(values))
- column_list = ', '.join(columns)
-
- query = f"INSERT INTO {table_name} ({column_list}) VALUES ({placeholders}) RETURNING id"
- cursor.execute(query, values)
- record_id = cursor.fetchone()[0]
-
- conn.commit()
- cursor.close()
- conn.close()
-
- return web.json_response({
- "success": True,
- "record_id": record_id,
- "message": f"Inserted into {table_name}"
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"Insert failed: {str(e)}"
- }, status=500)
-
- async def validate_data(self, params):
- try:
- data = params.get('data', {})
-
- # Simple validation
- is_valid = True
- errors = []
-
- if not data.get('vendor_name'):
- is_valid = False
- errors.append("Missing vendor name")
-
- if not data.get('amount') or float(data.get('amount', 0)) <= 0:
- is_valid = False
- errors.append("Invalid amount")
-
- return web.json_response({
- "success": True,
- "is_valid": is_valid,
- "errors": errors,
- "validated_data": data
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"Validation failed: {str(e)}"
- }, status=500)
-
- # Create server instance
- server = MCPBridgeServer()
-
- # Create web application
- app = web.Application()
- app.router.add_get('/health', server.health_handler)
- app.router.add_post('/execute_tool', server.execute_tool_handler)
-
- if __name__ == '__main__':
- print("🚀 Starting MCP Bridge Server on port 8081...")
- web.run_app(app, host='0.0.0.0', port=8081)
- """
-
- with open(ops_dir / "mcp_bridge_server.py", "w") as f:
- f.write(mcp_content)
-
+ # Fallback: create minimal demo structure
+ create_minimal_demo(demo_dir)
+
  except Exception as e:
  print(f"⚠️ Could not extract bundled files: {e}")
  print("Creating minimal demo structure...")
@@ -336,173 +123,45 @@ import asyncio
  import aiohttp
  from aiohttp import web
  import json
- import psycopg2
- import os

- class MCPBridgeServer:
- def __init__(self):
- self.db_url = os.getenv('DATABASE_URL', 'postgresql://postgres:postgres@localhost:5432/local_workflow')
-
- async def health_handler(self, request):
- return web.json_response({"status": "healthy", "server": "MCP Bridge"})
-
- async def execute_tool_handler(self, request):
- try:
- data = await request.json()
- tool_name = data.get('tool_name', 'unknown')
- tool_params = data.get('parameters', {})
-
- if tool_name == 'SQLExecutor':
- return await self.execute_sql(tool_params)
- elif tool_name == 'PostgresInsert':
- return await self.insert_data(tool_params)
- elif tool_name == 'DataValidator':
- return await self.validate_data(tool_params)
- else:
- return web.json_response({
- "success": True,
- "message": f"Demo {tool_name} executed",
- "data": {"demo": True}
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": str(e)
- }, status=500)
-
- async def execute_sql(self, params):
- try:
- query = params.get('query', 'SELECT 1')
- conn = psycopg2.connect(self.db_url)
- cursor = conn.cursor()
- cursor.execute(query)
- results = cursor.fetchall()
- cursor.close()
- conn.close()
-
- return web.json_response({
- "success": True,
- "results": results,
- "query": query
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"SQL execution failed: {str(e)}"
- }, status=500)
-
- async def insert_data(self, params):
- try:
- table_name = params.get('table_name', 'invoices')
- data = params.get('data', {})
-
- conn = psycopg2.connect(self.db_url)
- cursor = conn.cursor()
-
- # Simple insert logic
- columns = list(data.keys())
- values = list(data.values())
- placeholders = ', '.join(['%s'] * len(values))
- column_list = ', '.join(columns)
-
- query = f"INSERT INTO {table_name} ({column_list}) VALUES ({placeholders}) RETURNING id"
- cursor.execute(query, values)
- record_id = cursor.fetchone()[0]
-
- conn.commit()
- cursor.close()
- conn.close()
-
- return web.json_response({
- "success": True,
- "record_id": record_id,
- "message": f"Inserted into {table_name}"
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"Insert failed: {str(e)}"
- }, status=500)
-
- async def validate_data(self, params):
- try:
- data = params.get('data', {})
-
- # Simple validation
- is_valid = True
- errors = []
-
- if not data.get('vendor_name'):
- is_valid = False
- errors.append("Missing vendor name")
-
- if not data.get('amount') or float(data.get('amount', 0)) <= 0:
- is_valid = False
- errors.append("Invalid amount")
-
- return web.json_response({
- "success": True,
- "is_valid": is_valid,
- "errors": errors,
- "validated_data": data
- })
- except Exception as e:
- return web.json_response({
- "success": False,
- "error": f"Validation failed: {str(e)}"
- }, status=500)
+ async def health_handler(request):
+ return web.json_response({"status": "healthy"})

- # Create server instance
- server = MCPBridgeServer()
+ async def execute_tool_handler(request):
+ data = await request.json()
+ tool_name = data.get('tool_name', 'unknown')
+
+ # Mock responses for demo
+ if tool_name == 'SQLExecutor':
+ return web.json_response({
+ "success": True,
+ "results": [{"message": "Demo SQL executed"}]
+ })
+ elif tool_name == 'PostgresInsert':
+ return web.json_response({
+ "success": True,
+ "id": 1
+ })
+ else:
+ return web.json_response({
+ "success": True,
+ "message": f"Demo {tool_name} executed"
+ })

- # Create web application
  app = web.Application()
- app.router.add_get('/health', server.health_handler)
- app.router.add_post('/execute_tool', server.execute_tool_handler)
+ app.router.add_get('/health', health_handler)
+ app.router.add_post('/execute_tool', execute_tool_handler)

  if __name__ == '__main__':
- print("🚀 Starting MCP Bridge Server on port 8081...")
  web.run_app(app, host='0.0.0.0', port=8081)
  """

  with open(ops_dir / "mcp_bridge_server.py", "w") as f:
  f.write(mcp_content)

- # Copy the real ETL demo if available
+ # Create demo workflow
  demo_dir.mkdir(exist_ok=True)
- import shutil
-
- try:
- # Try to copy from demos directory
- source_demo = Path("demos/etl_invoice_processing/etl_invoice_demo.py")
- if source_demo.exists():
- # Copy the main demo script
- shutil.copy2(source_demo, demo_dir / "etl_invoice_demo.py")
- print("✅ Copied real ETL demo script")
-
- # Copy all necessary Python dependencies
- demo_files = [
- "database_monitor_agent.py",
- "simple_pdf_processor.py",
- "setup_demo_data.py"
- ]
-
- for file_name in demo_files:
- source_file = Path(f"demos/etl_invoice_processing/{file_name}")
- if source_file.exists():
- shutil.copy2(source_file, demo_dir / file_name)
- print(f"✅ Copied {file_name}")
-
- # Copy sample data
- data_dir = demo_dir / "data"
- data_dir.mkdir(exist_ok=True)
- source_data = Path("demos/etl_invoice_processing/data")
- if source_data.exists():
- shutil.copytree(source_data, data_dir, dirs_exist_ok=True)
- print("✅ Copied sample invoice data")
- else:
- # Create a basic demo if real one not found
- demo_content = """#!/usr/bin/env python3
+ demo_content = """#!/usr/bin/env python3
  import os
  import sys
  import time
@@ -535,47 +194,9 @@ def main():
  if __name__ == "__main__":
  main()
  """
- with open(demo_dir / "etl_demo.py", "w") as f:
- f.write(demo_content)
- print("⚠️ Using simplified demo (real demo not found)")
- except Exception as e:
- print(f"Warning: Could not copy ETL demo: {e}")
- # Fallback to basic demo
- demo_content = """#!/usr/bin/env python3
- import os
- import sys
- import time
-
- def main():
- print("🚀 Starting ETL Invoice Processing Demo...")
- print("🏢 Starting ETL Invoice Processing Department")
- print("📋 Mission: Complete end-to-end ETL process with comprehensive monitoring")
- print("👥 Team: Pre-ETL Database Monitor, Data Engineer, Invoice Parser, Data Entry Specialist, Post-ETL Database Monitor")
- print("👔 Manager: ETL Process Manager")

- steps = [
- ("Pre-ETL Database Monitor", "Database state captured: 2 rows"),
- ("Data Engineer", "Schema extracted successfully"),
- ("Invoice Parser", "Invoice data extracted: $270.57"),
- ("Data Entry Specialist", "Record inserted: ID 1"),
- ("Post-ETL Database Monitor", "Database state captured: 3 rows")
- ]
-
- for i, (step, result) in enumerate(steps, 1):
- print(f"\\n🔄 Step {i}/5: {step}")
- time.sleep(1)
- print(f"✅ {result}")
-
- print("\\n🎉 ETL Invoice Processing Department workflow completed!")
- print("⏱️ Total time: 5.2s")
- print("\\n📊 Demo completed successfully!")
- print("This was a simplified demo. For the full experience, check out the complete ETL workflow.")
-
- if __name__ == "__main__":
- main()
- """
- with open(demo_dir / "etl_demo.py", "w") as f:
- f.write(demo_content)
+ with open(demo_dir / "etl_demo.py", "w") as f:
+ f.write(demo_content)

  def setup_environment():
  """Set up environment variables for the demo"""
@@ -588,38 +209,6 @@ def setup_environment():
  os.environ['DATABASE_URL'] = 'postgresql://postgres:postgres@localhost:5432/local_workflow'
  print("✅ Set DATABASE_URL")

- def install_dependencies():
- """Install required dependencies for the demo"""
- try:
- print("📦 Installing demo dependencies...")
- dependencies = [
- 'requests==2.31.0',
- 'fastapi==0.104.1',
- 'uvicorn[standard]==0.24.0',
- 'pydantic==2.5.0',
- 'aiohttp',
- 'psycopg2-binary',
- 'httpx',
- 'huggingface_hub'
- ]
-
- for dep in dependencies:
- print(f" Installing {dep}...")
- result = subprocess.run([
- sys.executable, '-m', 'pip', 'install', dep
- ], capture_output=True, text=True)
-
- if result.returncode != 0:
- print(f"⚠️ Warning: Failed to install {dep}: {result.stderr}")
- else:
- print(f" ✅ {dep} installed")
-
- print("✅ Dependencies installed")
-
- except Exception as e:
- print(f"⚠️ Warning: Could not install dependencies: {e}")
- print(" You may need to install them manually: pip install requests fastapi uvicorn pydantic")
-
  def start_docker_services(demo_dir):
  """Start Docker containers using docker-compose"""
  try:
@@ -657,23 +246,9 @@ def wait_for_services():
  """Wait for services to be ready"""
  print("⏳ Waiting for PostgreSQL to be ready...")

- # Wait for PostgreSQL - try both possible container names
+ # Wait for PostgreSQL
  for i in range(30): # Wait up to 30 seconds
  try:
- # Try the memra-ops container name first
- result = subprocess.run([
- 'docker', 'exec', 'memra-ops_postgres_1',
- 'pg_isready', '-U', 'postgres', '-d', 'local_workflow'
- ], capture_output=True, text=True)
-
- if result.returncode == 0:
- print("✅ PostgreSQL is ready")
- break
- except:
- pass
-
- try:
- # Fallback to the old container name
  result = subprocess.run([
  'docker', 'exec', 'memra_postgres',
  'pg_isready', '-U', 'postgres', '-d', 'local_workflow'
@@ -694,101 +269,17 @@ def wait_for_services():
  def run_etl_workflow(demo_dir):
  """Run the ETL workflow"""
  try:
- # Try to run the real ETL demo first
- real_demo_script = demo_dir / "etl_invoice_demo.py"
- if real_demo_script.exists():
- print("🎯 Running real ETL workflow...")
- print(f"📁 Working directory: {demo_dir}")
- print(f"📄 Demo script: {real_demo_script}")
-
- # Check if data directory exists
- data_dir = demo_dir / "data"
- invoices_dir = data_dir / "invoices"
- if invoices_dir.exists():
- pdf_files = list(invoices_dir.glob("*.PDF"))
- print(f"📊 Found {len(pdf_files)} PDF files in {invoices_dir}")
- if pdf_files:
- print(f" First few files: {[f.name for f in pdf_files[:3]]}")
- else:
- print(f"⚠️ Warning: {invoices_dir} does not exist")
- print(f" Available directories in {demo_dir}:")
- for item in demo_dir.iterdir():
- if item.is_dir():
- print(f" - {item.name}/")
-
- print("⏱️ Processing 15 files with delays - this may take 10-15 minutes")
-
- # Set the working directory to the demo directory so the script can find data/invoices/
- result = subprocess.run(
- [sys.executable, str(real_demo_script)],
- cwd=demo_dir, # This is crucial - sets working directory
- timeout=1800 # 30 minute timeout
- )
+ # Run the demo script
+ demo_script = demo_dir / "etl_demo.py"
+ if demo_script.exists():
+ result = subprocess.run([sys.executable, str(demo_script)], cwd=demo_dir)
  return result.returncode == 0
  else:
- # Fallback to simplified demo
- demo_script = demo_dir / "etl_demo.py"
- if demo_script.exists():
- print("🎯 Running simplified demo...")
- result = subprocess.run([sys.executable, str(demo_script)], cwd=demo_dir)
- return result.returncode == 0
- else:
- print("❌ No demo script found")
- print(f" Looking for: {real_demo_script}")
- print(f" Available files in {demo_dir}:")
- for item in demo_dir.iterdir():
- print(f" - {item.name}")
- return False
-
- except subprocess.TimeoutExpired:
- print("⏰ ETL workflow timed out after 30 minutes")
- print("This is normal for large batches - the demo processes 15 files with delays")
- return False
- except Exception as e:
- print(f"❌ Error running ETL workflow: {e}")
- return False
-
- def start_mcp_bridge_server(demo_dir):
- """Start the MCP bridge server"""
- try:
- ops_dir = demo_dir / "memra-ops"
- bridge_script = ops_dir / "mcp_bridge_server.py"
-
- if not bridge_script.exists():
- print("❌ MCP bridge server script not found")
+ print("❌ Demo script not found")
  return False
-
- # Start the bridge server in the background
- if os.name == 'nt': # Windows
- # Use start command to run in background
- result = subprocess.run([
- 'start', '/B', 'python', str(bridge_script)
- ], cwd=ops_dir, shell=True, capture_output=True, text=True)
- else: # Unix/Linux/Mac
- result = subprocess.run([
- 'python', str(bridge_script)
- ], cwd=ops_dir, start_new_session=True, capture_output=True, text=True)
-
- # Wait a moment for the server to start
- time.sleep(3)
-
- # Check if the server is responding
- try:
- import requests
- response = requests.get('http://localhost:8081/health', timeout=5)
- if response.status_code == 200:
- print("✅ MCP bridge server started successfully")
- return True
- else:
- print(f"⚠️ MCP bridge server responded with status {response.status_code}")
- return False
- except Exception as e:
- print(f"⚠️ Could not verify MCP bridge server: {e}")
- print(" Server may still be starting up...")
- return True # Assume it's working

  except Exception as e:
- print(f"❌ Error starting MCP bridge server: {e}")
+ print(f"❌ Error running ETL workflow: {e}")
  return False

  def main():
memra/demos/etl_invoice_processing/etl_invoice_demo.py CHANGED
@@ -791,8 +791,10 @@ def process_database_insertion(agent, tool_results, **kwargs):
  # Inject the properly formatted data into the tool parameters
  if 'parameters' not in result:
  result['parameters'] = {}
- result['parameters']['data'] = db_data
- print(f"\n✅ [AGENT 4] Injected transformed data into PostgresInsert parameters")
+ # Pass the data in the format expected by PostgresInsert tool
+ result['parameters']['invoice_data'] = invoice_data # Pass the original invoice_data
+ result['parameters']['table_name'] = 'invoices'
+ print(f"\n✅ [AGENT 4] Injected invoice_data into PostgresInsert parameters")

  # Call the original print function for debugging
  print_database_data(agent, tool_results, invoice_data)
memra-0.2.15.dist-info/METADATA → memra-0.2.16.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: memra
- Version: 0.2.15
+ Version: 0.2.16
  Summary: Declarative framework for enterprise workflows with MCP integration - Client SDK
  Home-page: https://github.com/memra/memra-sdk
  Author: Memra
@@ -20,18 +20,22 @@ Classifier: Programming Language :: Python :: 3.11
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pydantic >=1.8.0
- Requires-Dist: httpx >=0.24.0
- Requires-Dist: typing-extensions >=4.0.0
- Requires-Dist: aiohttp >=3.8.0
- Requires-Dist: aiohttp-cors >=0.7.0
+ Requires-Dist: pydantic>=1.8.0
+ Requires-Dist: httpx>=0.24.0
+ Requires-Dist: typing-extensions>=4.0.0
+ Requires-Dist: aiohttp>=3.8.0
+ Requires-Dist: aiohttp-cors>=0.7.0
  Provides-Extra: dev
- Requires-Dist: pytest >=6.0 ; extra == 'dev'
- Requires-Dist: pytest-asyncio ; extra == 'dev'
- Requires-Dist: black ; extra == 'dev'
- Requires-Dist: flake8 ; extra == 'dev'
+ Requires-Dist: pytest>=6.0; extra == "dev"
+ Requires-Dist: pytest-asyncio; extra == "dev"
+ Requires-Dist: black; extra == "dev"
+ Requires-Dist: flake8; extra == "dev"
  Provides-Extra: mcp
- Requires-Dist: psycopg2-binary >=2.9.0 ; extra == 'mcp'
+ Requires-Dist: psycopg2-binary>=2.9.0; extra == "mcp"
+ Dynamic: author
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: requires-python

  # Memra SDK

memra-0.2.15.dist-info/RECORD → memra-0.2.16.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
- memra/__init__.py,sha256=6i82jodsWZPgtRhUaDF3wuQuRDSaboIpew8D3zDN__s,1109
- memra/cli.py,sha256=_IlOrTBlv_zBElxxQs13JYsdAuOn9wO1UiogmnwO1Qg,29430
+ memra/__init__.py,sha256=DEOu1dmnPnJWyfCgmarnjSFZZSgiUiv3VdGA7thdxYs,1109
+ memra/cli.py,sha256=DSkgwlko8NqUMdXxt-_Y7hMdqMGkGmF6PuzhQqCyjto,10310
  memra/discovery.py,sha256=yJIQnrDQu1nyzKykCIuzG_5SW5dIXHCEBLLKRWacIoY,480
  memra/discovery_client.py,sha256=AbnKn6qhyrf7vmOvknEeDzH4tiGHsqPHtDaein_qaW0,1271
  memra/execution.py,sha256=OXpBKxwBIjhACWL_qh8KHNndO8HUgB6gBF81AiQBBm0,34751
@@ -12,7 +12,7 @@ memra/demos/etl_invoice_processing/check_recent_db.py,sha256=tyO47DfwJkFH6IsdoM7
  memra/demos/etl_invoice_processing/database_monitor_agent.py,sha256=_A2mqweJTDOtbf57GCt20F5JG8RoH2UyUxET104dgAI,3497
  memra/demos/etl_invoice_processing/debug_mcp.py,sha256=xga1xzI0wycqF7aF5dsp3bL_o8aTnBYjOH0ZnjUZUtM,2066
  memra/demos/etl_invoice_processing/debug_schema.py,sha256=zirxgrgEtvE56oLNXvk4rL_kopIT53fIviKUQg1Ise4,1416
- memra/demos/etl_invoice_processing/etl_invoice_demo.py,sha256=6wVEAS8OMKwy4EPvDKoCov0r0QmgnfJ_Xz8zAs75haU,52813
+ memra/demos/etl_invoice_processing/etl_invoice_demo.py,sha256=_ogkYQpiG95a1Sc-JCgjP9zRp8HbPQL_wlc2LHIFXCE,52998
  memra/demos/etl_invoice_processing/modify_database.py,sha256=qHzBf8ukeHouaOsy0kjsfR00xCbvPUOrt1gwc4y7Xkc,1939
  memra/demos/etl_invoice_processing/run_etl_batch.py,sha256=czX-gfUuVnYb1ZjzirK7w9aa1fAUuIRCs3044AucT_Y,1928
  memra/demos/etl_invoice_processing/setup_demo_data.py,sha256=aeOZtFBBl5SZFZ5IqM35Tcc_PjEJHuc2cfY_LRWXkBM,4875
@@ -58,9 +58,9 @@ memra/demos/etl_invoice_processing/data/invoices/10352262702.PDF,sha256=aNWnxbYq
  memra/demos/etl_invoice_processing/data/invoices/10352262884.PDF,sha256=G0eszEhpTOS15hIlMyPMM6iyVw6UZPKycXvS3P42xRc,1010830
  memra/demos/etl_invoice_processing/data/invoices/10352263346.PDF,sha256=NMfsgrmaNtvNu6xk2aLtubI05I9cuVIbwJMxv_pYPhQ,1089624
  memra/demos/etl_invoice_processing/data/invoices/10352263429.PDF,sha256=1IzJbmnsKDE1cV6CtyNMENn0Rmpq2tA_BDnZYTYhNhQ,1082893
- memra-0.2.15.dist-info/LICENSE,sha256=8OrnTd8DWwLWmUEj5srSLvT4PREfW1Qo1T5gEUIHPws,1062
- memra-0.2.15.dist-info/METADATA,sha256=-a6F6PGuriDeFNJjtGn30f0XFdt64O1pmkZjKwvzCB8,9427
- memra-0.2.15.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- memra-0.2.15.dist-info/entry_points.txt,sha256=LBVjwWoxWJRzNLgeByPn6xUvWFIRnqnemvAZgIoSt08,41
- memra-0.2.15.dist-info/top_level.txt,sha256=pXWcTRS1zctdiSUivW4iyKpJ4tcfIu-1BW_fpbal3OY,6
- memra-0.2.15.dist-info/RECORD,,
+ memra-0.2.16.dist-info/licenses/LICENSE,sha256=8OrnTd8DWwLWmUEj5srSLvT4PREfW1Qo1T5gEUIHPws,1062
+ memra-0.2.16.dist-info/METADATA,sha256=u-MAjWfwdJ9f79TA7SSGouLxrKm-RDkX12GsXR-HnXI,9497
+ memra-0.2.16.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ memra-0.2.16.dist-info/entry_points.txt,sha256=LBVjwWoxWJRzNLgeByPn6xUvWFIRnqnemvAZgIoSt08,41
+ memra-0.2.16.dist-info/top_level.txt,sha256=pXWcTRS1zctdiSUivW4iyKpJ4tcfIu-1BW_fpbal3OY,6
+ memra-0.2.16.dist-info/RECORD,,
memra-0.2.15.dist-info/WHEEL → memra-0.2.16.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
