wizelit-sdk 0.1.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,11 @@
+ """Wizelit SDK package."""
+
+ from wizelit_sdk.agent_wrapper import WizelitAgentWrapper
+ from wizelit_sdk.database import DatabaseManager
+ from wizelit_sdk.agent_wrapper.job import Job
+ from wizelit_sdk.agent_wrapper.streaming import LogStreamer
+ from wizelit_sdk.models.base import BaseModel
+ from wizelit_sdk.models.job import JobModel, JobLogModel, JobStatus
+
+ __all__ = ["WizelitAgentWrapper", "DatabaseManager", "Job", "LogStreamer", "BaseModel", "JobModel", "JobLogModel", "JobStatus"]
+
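The top-level __init__.py above simply re-exports the SDK's public classes. A minimal import sketch (the instance below is hypothetical and relies only on the constructor defaults shown later in this diff):

    from wizelit_sdk import WizelitAgentWrapper, Job

    # Hypothetical: create a wrapper using only defaults
    agent = WizelitAgentWrapper(name="demo-agent")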
@@ -0,0 +1,9 @@
+ """Agent Wrapper - Internal utility package."""
+
+ # Import main functions
+ from .utils import greet, greet_many, greet_many3
+ from .agent_wrapper import WizelitAgentWrapper
+ from .job import Job
+ from .streaming import LogStreamer
+
+ __all__ = ["greet", "greet_many", "greet_many3", "WizelitAgentWrapper", "Job", "LogStreamer"]
@@ -0,0 +1,615 @@
+ # wizelit_sdk/core.py
+ import asyncio
+ import inspect
+ import logging
+ import os
+ from typing import Callable, Any, Optional, Literal, Dict, TYPE_CHECKING
+ from contextvars import ContextVar
+ from fastmcp import FastMCP, Context
+ from fastmcp.dependencies import CurrentContext
+ from wizelit_sdk.agent_wrapper.job import Job
+
+ if TYPE_CHECKING:
+     from wizelit_sdk.database import DatabaseManager
+
+ # Reusable framework constants
+ LLM_FRAMEWORK_CREWAI = "crewai"
+ LLM_FRAMEWORK_LANGCHAIN = "langchain"
+ LLM_FRAMEWORK_LANGGRAPH = "langgraph"
+
+ LlmFrameworkType = Literal["crewai", "langchain", "langgraph", None]
+
+ # Context variable for current Job instance
+ _current_job: ContextVar[Optional[Job]] = ContextVar("_current_job", default=None)
+
+
+ class CurrentJob:
+     """
+     Dependency injection class for Job instances.
+     Similar to CurrentContext(), returns the current Job instance from context.
+     """
+
+     def __call__(self) -> Optional[Job]:
+         """Return the current Job instance from context."""
+         return _current_job.get()
+
+
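CurrentJob mirrors fastmcp's CurrentContext pattern: the wrapper sets the _current_job ContextVar around a tool call and CurrentJob() reads it back. An illustrative sketch of that flow (job here is a placeholder Job instance, not part of this file):

    token = _current_job.set(job)       # done by the wrapper before invoking the tool
    try:
        assert CurrentJob()() is job    # code running inside the call sees the same Job
    finally:
        _current_job.reset(token)       # restored in the wrapper's finally block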
+ class WizelitAgentWrapper:
+     """
+     Main wrapper class that converts Python functions into MCP server tools.
+     Built on top of fast-mcp with enhanced streaming and agent framework support.
+     """
+
+     def __init__(
+         self,
+         name: str,
+         transport: str = "streamable-http",
+         host: str = "0.0.0.0",
+         port: int = 8080,
+         version: str = "1.0.0",
+         db_manager: Optional["DatabaseManager"] = None,
+         enable_streaming: bool = True,
+     ):
+         """
+         Initialize the Wizelit Agent.
+
+         Args:
+             name: Name of the MCP server
+             transport: Transport protocol (sse, streamable-http, stdio)
+             host: Host address
+             port: Port number
+             version: Version string for the server
+             db_manager: Optional DatabaseManager for job persistence
+             enable_streaming: Enable real-time log streaming via Redis
+         """
+         self._mcp = FastMCP(name=name)
+         self._name = name
+         self._version = version
+         self._tools = {}
+         self._jobs: Dict[str, Job] = {}  # Store jobs by job_id
+         self._host = host
+         self._transport = transport
+         self._port = port
+         self._db_manager = db_manager
+         self._log_streamer = None
+
+         # Initialize log streamer if enabled
+         if enable_streaming:
+             redis_url = os.getenv("REDIS_URL", "redis://localhost:6379")
+             try:
+                 from .streaming import LogStreamer
+
+                 self._log_streamer = LogStreamer(redis_url)
+                 print(f"Log streaming enabled via Redis: {redis_url}")
+             except ImportError:
+                 print("Warning: redis package not installed. Log streaming disabled.")
+             except Exception as e:
+                 print(f"Warning: Failed to initialize log streamer: {e}")
+
+         print(
+             f"WizelitAgentWrapper initialized with name: {name}, transport: {transport}, host: {host}, port: {port}"
+         )
+
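Putting the constructor together, a minimal instantiation might look like the sketch below (db_manager is omitted; REDIS_URL is read from the environment and defaults to redis://localhost:6379):

    # Sketch only; all values are illustrative
    agent = WizelitAgentWrapper(
        name="example-agent",
        transport="streamable-http",
        host="0.0.0.0",
        port=8080,
        enable_streaming=True,  # silently disabled if the redis package is missing
    )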
+     def ingest(
+         self,
+         is_long_running: bool = False,
+         description: Optional[str] = None,
+         response_handling: Optional[Dict[str, Any]] = None,
+     ):
+         """
+         Decorator to convert a function into an MCP tool.
+
+         Args:
+             is_long_running: If True, enables progress reporting
+             description: Human-readable description of the tool
+             response_handling: Optional dict configuring how tool responses are handled:
+                 {
+                     "mode": "direct" | "formatted" | "default",  # Default: "default"
+                     "extract_path": "content[0].text",  # Optional: path to extract value. Default: "content[0].text" (MCP format)
+                     "template": "Message: {value}",  # Optional: template for formatted mode. Default: "{value}"
+                     "content_type": "text" | "json" | "auto"  # Default: "text"
+                 }
+
+         Mode options:
+             - "direct": Return response directly to user (bypass LLM processing)
+             - "formatted": Format response using template before returning to user
+             - "default": Normal LLM processing (let LLM interpret and respond)
+
+         Content type options:
+             - "text": Always convert content to plain string using str(). Use for human-readable text responses.
+               Example: "Hello world" -> "Hello world", {"key": "value"} -> "{'key': 'value'}"
+
+             - "json": Format content as pretty-printed JSON. If content is a string, tries to parse it as JSON first.
+               Use when you want structured data displayed as formatted JSON.
+               Example: {"key": "value"} -> '{\n "key": "value"\n}', "Hello" -> "Hello" (if not valid JSON)
+
+             - "auto": Smart formatting - strings returned as-is, dicts/lists converted to JSON, other types to string.
+               Use when content type is unknown or mixed.
+               Example: "Hello" -> "Hello", {"key": "value"} -> '{\n "key": "value"\n}', 123 -> "123"
+
+         Usage:
+             @agent.ingest(
+                 is_long_running=True,
+                 description="Start a job",
+                 response_handling={
+                     "mode": "formatted",
+                     "extract_path": "content[0].text",
+                     "template": "Job started. ID: {value}",
+                     "content_type": "text"
+                 }
+             )
+             def start_job(code: str, job: Job) -> str:
+                 return job.id
+         """
+
+         def decorator(func: Callable) -> Callable:
+             # Store original function metadata
+             tool_name = func.__name__
+             tool_description = description or func.__doc__ or f"Execute {tool_name}"
+
+             # Detect if function is async
+             is_async = inspect.iscoroutinefunction(func)
+
+             # Get function signature
+             sig = inspect.signature(func)
+
+             # Build new signature with ctx: Context = CurrentContext() as LAST parameter
+             # This follows fast-mcp v2.14+ convention for dependency injection
+             params_list = list(sig.parameters.values())
+
+             # Check if function has 'job' parameter (for backward compatibility)
+             has_job_param = sig.parameters.get("job") is not None
+
+             if is_long_running and not has_job_param:
+                 raise ValueError(
+                     "is_long_running is True but 'job' parameter is not provided"
+                 )
+
+             # Remove original 'job' parameter if it exists
+             if has_job_param:
+                 params_list = [p for p in params_list if p.name != "job"]
+
+             # Add ctx as the last parameter with CurrentContext() as default
+             ctx_param = inspect.Parameter(
+                 "ctx",
+                 inspect.Parameter.KEYWORD_ONLY,
+                 default=CurrentContext(),
+                 annotation=Context,
+             )
+             params_list.append(ctx_param)
+
+             # Add job parameter if function signature includes it
+             # Use None as default - we'll resolve CurrentJob() in the wrapper at call time
+             if has_job_param:
+                 job_param = inspect.Parameter(
+                     "job",
+                     inspect.Parameter.KEYWORD_ONLY,
+                     default=None,
+                     annotation=Any,  # Use Any to avoid Pydantic issues
+                 )
+                 params_list.append(job_param)
+
+             new_sig = sig.replace(parameters=params_list)
+
+             # Create the wrapper function
+             async def tool_wrapper(*args, **kwargs):
+                 """MCP-compliant wrapper with streaming."""
+                 # Extract ctx from kwargs (injected by fast-mcp via CurrentContext())
+                 ctx = kwargs.pop("ctx", None)
+                 if ctx is None:
+                     raise ValueError("Context not injected by fast-mcp")
+
+                 # Extract job from kwargs if present
+                 # Handle case where fast-mcp might pass CurrentJob instance instead of Job
+                 job = None
+                 if has_job_param:
+                     job = kwargs.pop("job", None)
+                     # If job is a CurrentJob instance, call it to get the actual Job
+                     if isinstance(job, CurrentJob):
+                         job = job()
+                     # If job is still None, _execute_tool will create it
+
+                 # Bind all arguments (including positional) to the original function signature
+                 # This ensures parameters are correctly passed even if fast-mcp uses positional args
+                 # Create a signature without 'job' since we've already extracted it
+                 func_sig = inspect.signature(func)
+                 if has_job_param and "job" in func_sig.parameters:
+                     # Remove 'job' from signature for binding since we handle it separately
+                     params_without_job = {
+                         name: param
+                         for name, param in func_sig.parameters.items()
+                         if name != "job"
+                     }
+                     func_sig = func_sig.replace(
+                         parameters=list(params_without_job.values())
+                     )
+
+                 try:
+                     bound_args = func_sig.bind(*args, **kwargs)
+                     bound_args.apply_defaults()
+                     func_kwargs = bound_args.arguments
+                 except TypeError as e:
+                     # Fallback: if binding fails, use kwargs as-is (shouldn't happen normally)
+                     logging.warning(
+                         f"Failed to bind arguments for {tool_name}: {e}. Args: {args}, Kwargs: {kwargs}"
+                     )
+                     func_kwargs = kwargs
+
+                 return await self._execute_tool(
+                     func, ctx, is_async, is_long_running, tool_name, job, **func_kwargs
+                 )
+
+             # Set the signature with ctx as last parameter with CurrentContext() default
+             tool_wrapper.__signature__ = new_sig
+             tool_wrapper.__name__ = tool_name
+             tool_wrapper.__doc__ = tool_description
+
+             # Copy annotations and add Context
+             # Note: We don't add job annotation here since we use Any and exclude it from schema
+             new_annotations = {}
+             if hasattr(func, "__annotations__"):
+                 new_annotations.update(func.__annotations__)
+             new_annotations["ctx"] = Context
+             if has_job_param:
+                 new_annotations["job"] = (
+                     Any  # Use Any instead of Job to avoid Pydantic schema issues
+                 )
+             tool_wrapper.__annotations__ = new_annotations
+
+             # Register with fast-mcp
+             # Exclude ctx and job from schema generation since they're dependency-injected
+             exclude_args = ["ctx"]
+             if has_job_param:
+                 exclude_args.append("job")
+
+             # Prepare tool kwargs
+             tool_kwargs = {
+                 "description": tool_description,
+                 "exclude_args": exclude_args,
+             }
+
+             # Add response_handling metadata to tool's meta field (exposed via MCP protocol)
+             if response_handling:
+                 tool_kwargs["meta"] = {"wizelit_response_handling": response_handling}
+
+             registered_tool = self._mcp.tool(**tool_kwargs)(tool_wrapper)
+
+             # Store tool metadata
+             self._tools[tool_name] = {
+                 "function": func,
+                 "wrapper": registered_tool,
+                 "is_long_running": is_long_running,
+             }
+
+             # Return original function so it can still be called directly
+             return func
+
+         return decorator
+
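In use, the decorator looks much like the docstring example above: the job parameter is removed from the public MCP schema and injected at execution time, and the original function is returned so it stays directly callable. A hedged sketch (analyze and its dataset argument are hypothetical):

    @agent.ingest(
        is_long_running=True,
        description="Run a long analysis and return the job id",
        response_handling={"mode": "direct"},
    )
    def analyze(dataset: str, job: Job) -> str:
        # job is supplied by the wrapper; MCP clients only see the dataset parameter
        return job.id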
+     async def _execute_tool(
+         self,
+         func: Callable,
+         ctx: Context,
+         is_async: bool,
+         is_long_running: bool,
+         tool_name: str,
+         job: Optional[Job] = None,
+         **kwargs,
+     ) -> Any:
+         """Central execution method for all tools."""
+
+         token = None
+         # Create Job instance if not provided
+         if job is None and is_long_running:
+             job = Job(ctx, db_manager=self._db_manager, log_streamer=self._log_streamer)
+
+             # Persist job to database BEFORE any logs are emitted
+             if self._db_manager:
+                 await job.persist_to_db()
+
+             # Store job in jobs dictionary for later retrieval
+             self._jobs[job.id] = job
+
+             # Set CurrentJob context so CurrentJob() can retrieve it
+             token = _current_job.set(job)
+
+         try:
+             try:
+                 # Add job to kwargs if function signature includes it
+                 func_sig = inspect.signature(func)
+                 if "job" in func_sig.parameters:
+                     # For non-long-running tools, create a minimal job if needed
+                     if job is None and not is_long_running:
+                         # Create a lightweight job for non-long-running tools that require it
+                         job = Job(
+                             ctx,
+                             db_manager=self._db_manager,
+                             log_streamer=self._log_streamer,
+                         )
+                         # Don't persist to DB for fast tools, just create in memory
+                     if job is not None:
+                         kwargs["job"] = job
+
+                 # Execute function (async or sync)
+                 logging.info(f"kwargs: {kwargs}")
+                 if is_async:
+                     result = await func(**kwargs)
+                 else:
+                     result = await asyncio.to_thread(func, **kwargs)
+
+                 # Ensure result is never None for functions that should return strings
+                 func_sig = inspect.signature(func)
+                 if result is None:
+                     return_annotation = func_sig.return_annotation
+                     # Check if return type is str (handle both direct str and Optional[str])
+                     is_str_return = (
+                         return_annotation is str
+                         or (
+                             hasattr(return_annotation, "__origin__")
+                             and return_annotation.__origin__ is str
+                         )
+                         or (
+                             hasattr(return_annotation, "__args__")
+                             and str in getattr(return_annotation, "__args__", [])
+                         )
+                     )
+                     if is_str_return:
+                         logging.warning(
+                             f"Function {tool_name} returned None but should return str. Returning empty string."
+                         )
+                         result = ""
+
+                 return result
+
+             except Exception as e:
+                 # Mark job as failed (only if job exists)
+                 if job is not None:
+                     job.status = "failed"
+
+                 # Stream error information
+                 await ctx.report_progress(
+                     progress=0, message=f"Error in {tool_name}: {str(e)}"
+                 )
+                 raise
+         finally:
+             # Reset CurrentJob context only if we set it
+             if token is not None:
+                 _current_job.reset(token)
+
+     def run(
+         self,
+         transport: Optional[str] = None,
+         host: Optional[str] = None,
+         port: Optional[int] = None,
+         **kwargs,
+     ):
+         """
+         Start the MCP server.
+
+         Args:
+             transport: MCP transport type ('stdio', 'http', 'streamable-http')
+             host: Host to bind to (for HTTP transports)
+             port: Port to bind to (for HTTP transports)
+             **kwargs: Additional arguments passed to fast-mcp
+         """
+         transport = transport or self._transport
+         host = host or self._host
+         port = port or self._port
+         print(f"🚀 Starting {self._name} MCP Server")
+
+         if transport in ["http", "streamable-http"]:
+             print(f"🌐 Listening on {host}:{port}")
+
+         print(f"🔧 Registered {len(self._tools)} tool(s):")
+         for tool_name, tool_info in self._tools.items():
+             lr_status = "⏱️ long-running" if tool_info["is_long_running"] else "⚡ fast"
+             print(f" • {tool_name} [{lr_status}]")
+
+         # Start the server
+         self._mcp.run(transport=transport, host=host, port=port, **kwargs)
+
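run() prints a summary of the registered tools and then delegates to FastMCP.run with the settings captured at construction time, so starting the server is a one-liner:

    if __name__ == "__main__":
        agent.run()  # uses the transport/host/port passed to __init__ unless overridden here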
+     def list_tools(self) -> dict:
+         """Return metadata about all registered tools."""
+         return {
+             name: {
+                 "is_long_running": info["is_long_running"],
+             }
+             for name, info in self._tools.items()
+         }
+
+     def get_job_logs(self, job_id: str) -> Optional[list]:
+         """
+         Get logs for a specific job by job_id.
+
+         Args:
+             job_id: The job identifier
+
+         Returns:
+             List of log messages (timestamped strings) if job exists, None otherwise
+         """
+         job = self._jobs.get(job_id)
+         if job is None:
+             return None
+         return job.logs
+
+     def get_job_status(self, job_id: str) -> Optional[str]:
+         """
+         Get status for a specific job by job_id.
+
+         Args:
+             job_id: The job identifier
+
+         Returns:
+             Job status ("running", "completed", "failed") if job exists, None otherwise
+         """
+         job = self._jobs.get(job_id)
+         if job is None:
+             return None
+         return job.status
+
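Both accessors above consult only the in-memory _jobs cache, so they return None for job ids created by another process or before a restart. A usage sketch (job_id is a placeholder):

    status = agent.get_job_status(job_id)   # "running", "completed", "failed", or None
    for line in agent.get_job_logs(job_id) or []:
        print(line)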
+     def get_job(self, job_id: str) -> Optional[Job]:
+         """
+         Get a Job instance by job_id.
+         Only consults the in-memory cache; use the async get_job_from_db for database retrieval.
+
+         Args:
+             job_id: The job identifier
+
+         Returns:
+             Job instance if it exists in memory, None otherwise
+         """
+         # Check in-memory first
+         job = self._jobs.get(job_id)
+         if job:
+             return job
+
+         # Not in memory. Loading from the database needs an async context,
+         # so this synchronous accessor returns None; use get_job_from_db instead.
+         return None
+
+     async def get_job_from_db(self, job_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Retrieve job data from database asynchronously.
+
+         Args:
+             job_id: The job identifier
+
+         Returns:
+             Dict with job data or None if not found
+         """
+         if not self._db_manager:
+             return None
+
+         try:
+             from wizelit_sdk.models.job import JobModel
+             from sqlalchemy import select
+
+             async with self._db_manager.get_session() as session:
+                 result = await session.execute(
+                     select(JobModel).where(JobModel.id == job_id)
+                 )
+                 job_model = result.scalar_one_or_none()
+
+                 if not job_model:
+                     return None
+
+                 return {
+                     "id": job_model.id,
+                     "status": job_model.status,
+                     "result": job_model.result,
+                     "error": job_model.error,
+                     "created_at": (
+                         job_model.created_at.isoformat()
+                         if job_model.created_at
+                         else None
+                     ),
+                     "updated_at": (
+                         job_model.updated_at.isoformat()
+                         if job_model.updated_at
+                         else None
+                     ),
+                 }
+         except Exception as e:
+             logging.error(f"Error retrieving job from database: {e}")
+             return None
+
+     async def get_job_logs_from_db(
+         self, job_id: str, limit: int = 100
+     ) -> Optional[list]:
+         """
+         Retrieve job logs from database asynchronously.
+
+         Args:
+             job_id: The job identifier
+             limit: Maximum number of logs to retrieve
+
+         Returns:
+             List of log messages or None if job not found
+         """
+         if not self._db_manager:
+             return None
+
+         try:
+             from wizelit_sdk.models.job import JobLogModel
+             from sqlalchemy import select
+
+             async with self._db_manager.get_session() as session:
+                 result = await session.execute(
+                     select(JobLogModel)
+                     .where(JobLogModel.job_id == job_id)
+                     .order_by(JobLogModel.timestamp.asc())
+                     .limit(limit)
+                 )
+                 log_models = result.scalars().all()
+
+                 return [
+                     f"[{log.level}] [{log.timestamp.strftime('%H:%M:%S')}] {log.message}"
+                     for log in log_models
+                 ]
+         except Exception as e:
+             logging.error(f"Error retrieving logs from database: {e}")
+             return None
+
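Unlike the in-memory accessors, these two helpers read persisted state and therefore work across processes, provided the wrapper was constructed with a db_manager. A sketch (show_job and job_id are illustrative):

    async def show_job(agent: WizelitAgentWrapper, job_id: str) -> None:
        data = await agent.get_job_from_db(job_id)
        if data is None:
            print("job not found or no db_manager configured")
            return
        print(data["status"], data["result"], data["error"])
        for line in await agent.get_job_logs_from_db(job_id, limit=50) or []:
            print(line)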
+     def get_jobs(self) -> list[Job]:
+         """
+         Get all Job instances.
+
+         Returns:
+             List of Job instances
+         """
+         return list(self._jobs.values())
+
+     def set_job_status(self, job_id: str, status: str) -> bool:
+         """
+         Set the status of a job by job_id.
+
+         Args:
+             job_id: The job identifier
+             status: New status ("running", "completed", "failed")
+
+         Returns:
+             True if job exists and status was updated, False otherwise
+         """
+         job = self._jobs.get(job_id)
+         if job is None:
+             return False
+         job.status = status
+         return True
+
+     def set_job_result(
+         self, job_id: str, result: Optional[str | dict[str, Any]]
+     ) -> bool:
+         """
+         Set the result of a job by job_id.
+
+         Args:
+             job_id: The job identifier
+             result: The job result
+
+         Returns:
+             True if job exists and result was updated, False otherwise
+         """
+         job = self._jobs.get(job_id)
+         if job is None:
+             return False
+         job.result = result
+         return True
+
+     def set_job_error(self, job_id: str, error: Optional[str]) -> bool:
+         """
+         Set the error message of a job by job_id.
+
+         Args:
+             job_id: The job identifier
+             error: The error message
+
+         Returns:
+             True if job exists and error was updated, False otherwise
+         """
+         job = self._jobs.get(job_id)
+         if job is None:
+             return False
+         job.error = error
+         return True
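The three setters only mutate the in-memory Job instance; whether the change is also persisted depends on the Job implementation. A usage sketch (job_id and the result payload are placeholders):

    if agent.set_job_result(job_id, {"rows_processed": 1000}):
        agent.set_job_status(job_id, "completed")
    else:
        print(f"job {job_id} is not tracked in memory")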