genxai-framework 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. cli/commands/__init__.py +3 -1
  2. cli/commands/connector.py +309 -0
  3. cli/commands/workflow.py +80 -0
  4. cli/main.py +3 -1
  5. genxai/__init__.py +33 -0
  6. genxai/agents/__init__.py +8 -0
  7. genxai/agents/presets.py +53 -0
  8. genxai/connectors/__init__.py +10 -0
  9. genxai/connectors/base.py +3 -3
  10. genxai/connectors/config_store.py +106 -0
  11. genxai/connectors/github.py +117 -0
  12. genxai/connectors/google_workspace.py +124 -0
  13. genxai/connectors/jira.py +108 -0
  14. genxai/connectors/notion.py +97 -0
  15. genxai/connectors/slack.py +121 -0
  16. genxai/core/agent/config_io.py +32 -1
  17. genxai/core/agent/runtime.py +41 -4
  18. genxai/core/graph/__init__.py +3 -0
  19. genxai/core/graph/engine.py +218 -11
  20. genxai/core/graph/executor.py +103 -10
  21. genxai/core/graph/nodes.py +28 -0
  22. genxai/core/graph/workflow_io.py +199 -0
  23. genxai/flows/__init__.py +33 -0
  24. genxai/flows/auction.py +66 -0
  25. genxai/flows/base.py +134 -0
  26. genxai/flows/conditional.py +45 -0
  27. genxai/flows/coordinator_worker.py +62 -0
  28. genxai/flows/critic_review.py +62 -0
  29. genxai/flows/ensemble_voting.py +49 -0
  30. genxai/flows/loop.py +42 -0
  31. genxai/flows/map_reduce.py +61 -0
  32. genxai/flows/p2p.py +146 -0
  33. genxai/flows/parallel.py +27 -0
  34. genxai/flows/round_robin.py +24 -0
  35. genxai/flows/router.py +45 -0
  36. genxai/flows/selector.py +63 -0
  37. genxai/flows/subworkflow.py +35 -0
  38. genxai/llm/factory.py +17 -10
  39. genxai/llm/providers/anthropic.py +116 -1
  40. genxai/observability/logging.py +2 -2
  41. genxai/security/auth.py +10 -6
  42. genxai/security/cost_control.py +6 -6
  43. genxai/security/jwt.py +2 -2
  44. genxai/security/pii.py +2 -2
  45. genxai/tools/builtin/__init__.py +3 -0
  46. genxai/tools/builtin/communication/human_input.py +32 -0
  47. genxai/tools/custom/test-2.py +19 -0
  48. genxai/tools/custom/test_tool_ui.py +9 -0
  49. genxai/tools/persistence/service.py +3 -3
  50. genxai/triggers/schedule.py +2 -2
  51. genxai/utils/tokens.py +6 -0
  52. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/METADATA +63 -12
  53. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/RECORD +57 -28
  54. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/WHEEL +0 -0
  55. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/entry_points.txt +0 -0
  56. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/licenses/LICENSE +0 -0
  57. {genxai_framework-0.1.0.dist-info → genxai_framework-0.1.2.dist-info}/top_level.txt +0 -0
genxai/flows/router.py ADDED
@@ -0,0 +1,45 @@
1
+ """Rule-based routing flow orchestrator."""
2
+
3
+ from typing import Callable, List
4
+
5
+ from genxai.core.graph.engine import Graph
6
+ from genxai.core.graph.edges import Edge
7
+ from genxai.core.graph.nodes import AgentNode, InputNode, OutputNode
8
+ from genxai.flows.base import FlowOrchestrator
9
+
10
+
11
class RouterFlow(FlowOrchestrator):
    """Dispatch work to exactly one agent chosen by a deterministic rule.

    The ``router`` callable inspects the shared state dict and returns the
    id of the agent that should handle the request.
    """

    def __init__(
        self,
        agents: List,
        router: Callable[[dict], str],
        name: str = "router_flow",
        llm_provider=None,
    ) -> None:
        super().__init__(agents=agents, name=name, llm_provider=llm_provider)
        self.router = router

    def build_graph(self) -> Graph:
        """Build a star-shaped graph: input fans out to agents, agents feed output."""
        graph = Graph(name=self.name)

        entry = InputNode(id="input")
        graph.add_node(entry)

        exit_node = OutputNode(id="output")
        graph.add_node(exit_node)

        for agent_node in self._agent_nodes():
            graph.add_node(agent_node)
            # Bind the node id as a default argument so each edge condition
            # captures its own agent id instead of the last loop value.
            graph.add_edge(
                Edge(
                    source=entry.id,
                    target=agent_node.id,
                    condition=lambda state, agent_id=agent_node.id: self.router(state) == agent_id,
                )
            )
            graph.add_edge(Edge(source=agent_node.id, target=exit_node.id))

        # Unconditional input -> output edge, mirroring the original wiring.
        # NOTE(review): looks like this always lets flow reach the output even
        # when an agent was routed to — confirm Graph edge semantics intend this.
        graph.add_edge(Edge(source=entry.id, target=exit_node.id))

        return graph
@@ -0,0 +1,63 @@
1
+ """Selector-based flow orchestrator."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import Any, Callable, Dict, List, Optional
6
+
7
+ from genxai.core.graph.engine import Graph
8
+ from genxai.core.graph.nodes import AgentNode
9
+ from genxai.flows.base import FlowOrchestrator
10
+
11
+
12
class SelectorFlow(FlowOrchestrator):
    """Route to the next agent using a selector function.

    The selector callable receives the current state and the list of agent
    ids, and returns the id of the agent to execute next.
    """

    def __init__(
        self,
        agents: List[Any],
        selector: Callable[[Dict[str, Any], List[str]], str],
        name: str = "selector_flow",
        llm_provider: Any = None,
        max_hops: int = 1,
    ) -> None:
        super().__init__(agents=agents, name=name, llm_provider=llm_provider)
        self.selector = selector
        self.max_hops = max_hops

    def build_graph(self) -> Graph:
        """Return a graph holding the agent nodes; edges are decided at run time."""
        graph = Graph(name=self.name)
        for agent_node in self._agent_nodes():
            graph.add_node(agent_node)
        return graph

    async def run(
        self,
        input_data: Any,
        state: Optional[Dict[str, Any]] = None,
        max_iterations: int = 100,
    ) -> Dict[str, Any]:
        """Ask the selector which agent to run, for up to ``max_hops`` hops.

        Raises:
            ValueError: if the selector returns an id not in ``self.agents``.
        """
        graph = self.build_graph()
        state = {} if state is None else state
        state["input"] = input_data

        valid_ids = [agent.id for agent in self.agents]
        for hop_index in range(self.max_hops):
            choice = self.selector(state, valid_ids)
            if choice not in valid_ids:
                raise ValueError(
                    f"SelectorFlow returned unknown agent id '{choice}'."
                )

            state["next_agent"] = choice
            state["selector_hop"] = hop_index + 1
            # NOTE(review): relies on Graph's private _execute_node — confirm
            # no public single-node execution API exists.
            await graph._execute_node(choice, state, max_iterations)

        return state
@@ -0,0 +1,35 @@
1
+ """Subworkflow flow orchestrator."""
2
+
3
+ from typing import Any, Dict, Optional
4
+
5
+ from genxai.core.graph.engine import Graph
6
+ from genxai.flows.base import FlowOrchestrator
7
+
8
+
9
class SubworkflowFlow(FlowOrchestrator):
    """Wrap a pre-built graph so it can be executed as a flow."""

    def __init__(
        self,
        graph: Graph,
        name: str = "subworkflow_flow",
        llm_provider: Any = None,
    ) -> None:
        # No agents of its own: the wrapped graph already carries its nodes.
        super().__init__(agents=[], name=name, llm_provider=llm_provider, allow_empty_agents=True)
        self.graph = graph

    def build_graph(self) -> Graph:
        """Return the wrapped graph unchanged."""
        return self.graph

    async def run(
        self,
        input_data: Any,
        state: Optional[Dict[str, Any]] = None,
        max_iterations: int = 100,
    ) -> Dict[str, Any]:
        """Delegate execution straight to the wrapped graph."""
        return await self.graph.run(
            input_data=input_data,
            state=state,
            max_iterations=max_iterations,
            llm_provider=self.llm_provider,
        )
genxai/llm/factory.py CHANGED
@@ -37,6 +37,11 @@ class LLMProviderFactory:
37
37
  "claude-3-opus": "genxai.llm.providers.anthropic.AnthropicProvider",
38
38
  "claude-3-sonnet": "genxai.llm.providers.anthropic.AnthropicProvider",
39
39
  "claude-3-haiku": "genxai.llm.providers.anthropic.AnthropicProvider",
40
+ "claude-3-5-sonnet-20241022": "genxai.llm.providers.anthropic.AnthropicProvider",
41
+ "claude-3-5-sonnet-20240620": "genxai.llm.providers.anthropic.AnthropicProvider",
42
+ "claude-3-opus-20240229": "genxai.llm.providers.anthropic.AnthropicProvider",
43
+ "claude-3-sonnet-20240229": "genxai.llm.providers.anthropic.AnthropicProvider",
44
+ "claude-3-haiku-20240307": "genxai.llm.providers.anthropic.AnthropicProvider",
40
45
  "google": "genxai.llm.providers.google.GoogleProvider",
41
46
  "gemini-pro": "genxai.llm.providers.google.GoogleProvider",
42
47
  "gemini-ultra": "genxai.llm.providers.google.GoogleProvider",
@@ -235,41 +240,43 @@ class LLMProviderFactory:
235
240
  Returns:
236
241
  Provider class or None
237
242
  """
243
+ model_key = model.lower()
244
+
238
245
  # Direct match in pre-loaded providers
239
- if model in cls._providers:
240
- return cls._providers[model]
246
+ if model_key in cls._providers:
247
+ return cls._providers[model_key]
241
248
 
242
249
  # Check lazy-loaded providers
243
- if model in cls._provider_modules:
244
- provider_class = cls._load_provider_class(cls._provider_modules[model])
250
+ if model_key in cls._provider_modules:
251
+ provider_class = cls._load_provider_class(cls._provider_modules[model_key])
245
252
  if provider_class:
246
253
  # Cache it for future use
247
- cls._providers[model] = provider_class
254
+ cls._providers[model_key] = provider_class
248
255
  return provider_class
249
256
 
250
257
  # Check if model starts with known provider prefix
251
- model_lower = model.lower()
258
+ model_lower = model_key
252
259
  if model_lower.startswith("gpt"):
253
260
  return OpenAIProvider
254
261
  elif model_lower.startswith("claude"):
255
262
  provider_class = cls._load_provider_class("genxai.llm.providers.anthropic.AnthropicProvider")
256
263
  if provider_class:
257
- cls._providers[model] = provider_class
264
+ cls._providers[model_key] = provider_class
258
265
  return provider_class
259
266
  elif model_lower.startswith("gemini"):
260
267
  provider_class = cls._load_provider_class("genxai.llm.providers.google.GoogleProvider")
261
268
  if provider_class:
262
- cls._providers[model] = provider_class
269
+ cls._providers[model_key] = provider_class
263
270
  return provider_class
264
271
  elif model_lower.startswith("command"):
265
272
  provider_class = cls._load_provider_class("genxai.llm.providers.cohere.CohereProvider")
266
273
  if provider_class:
267
- cls._providers[model] = provider_class
274
+ cls._providers[model_key] = provider_class
268
275
  return provider_class
269
276
  elif model_lower.startswith("llama") or model_lower.startswith("mistral") or model_lower.startswith("phi"):
270
277
  provider_class = cls._load_provider_class("genxai.llm.providers.ollama.OllamaProvider")
271
278
  if provider_class:
272
- cls._providers[model] = provider_class
279
+ cls._providers[model_key] = provider_class
273
280
  return provider_class
274
281
 
275
282
  return None
@@ -12,6 +12,23 @@ logger = logging.getLogger(__name__)
12
12
  class AnthropicProvider(LLMProvider):
13
13
  """Anthropic Claude LLM provider."""
14
14
 
15
+ _MODEL_ALIASES = {
16
+ # Claude 4.5 models
17
+ "claude-sonnet-4-5": "claude-sonnet-4-5-20250929",
18
+ "claude-haiku-4-5": "claude-haiku-4-5-20251001",
19
+ "claude-opus-4-5": "claude-opus-4-5-20251101",
20
+ # Claude 4 models
21
+ "claude-sonnet-4": "claude-sonnet-4-20250514",
22
+ "claude-opus-4": "claude-opus-4-20250514",
23
+ "claude-opus-4-1": "claude-opus-4-1-20250805",
24
+ # Claude 3.5 models
25
+ "claude-3-5-sonnet": "claude-3-5-sonnet-20241022",
26
+ # Claude 3 models
27
+ "claude-3-opus": "claude-3-opus-20240229",
28
+ "claude-3-sonnet": "claude-3-sonnet-20240229",
29
+ "claude-3-haiku": "claude-3-haiku-20240307",
30
+ }
31
+
15
32
  def __init__(
16
33
  self,
17
34
  model: str = "claude-3-opus-20240229",
@@ -29,7 +46,9 @@ class AnthropicProvider(LLMProvider):
29
46
  max_tokens: Maximum tokens to generate
30
47
  **kwargs: Additional Anthropic-specific parameters
31
48
  """
32
- super().__init__(model, temperature, max_tokens, **kwargs)
49
+ resolved_model = self._normalize_model(model)
50
+ super().__init__(resolved_model, temperature, max_tokens, **kwargs)
51
+ self.requested_model = model
33
52
 
34
53
  self.api_key = api_key or os.getenv("ANTHROPIC_API_KEY")
35
54
  if not self.api_key:
@@ -126,6 +145,35 @@ class AnthropicProvider(LLMProvider):
126
145
  )
127
146
 
128
147
  except Exception as e:
148
+ if self._is_model_not_found_error(e):
149
+ fallback_model = self._fallback_model(self.model)
150
+ if fallback_model and fallback_model != self.model:
151
+ logger.warning(
152
+ "Anthropic model '%s' not found. Falling back to '%s'.",
153
+ self.model,
154
+ fallback_model,
155
+ )
156
+ self.model = fallback_model
157
+ params["model"] = fallback_model
158
+ response = await self._client.messages.create(**params)
159
+ content = response.content[0].text if response.content else ""
160
+ finish_reason = response.stop_reason
161
+ usage = {
162
+ "prompt_tokens": response.usage.input_tokens if response.usage else 0,
163
+ "completion_tokens": response.usage.output_tokens if response.usage else 0,
164
+ "total_tokens": (
165
+ (response.usage.input_tokens + response.usage.output_tokens)
166
+ if response.usage else 0
167
+ ),
168
+ }
169
+ self._update_stats(usage)
170
+ return LLMResponse(
171
+ content=content,
172
+ model=response.model,
173
+ usage=usage,
174
+ finish_reason=finish_reason,
175
+ metadata={"response_id": response.id, "type": response.type},
176
+ )
129
177
  logger.error(f"Anthropic API call failed: {e}")
130
178
  raise
131
179
 
@@ -245,5 +293,72 @@ class AnthropicProvider(LLMProvider):
245
293
  )
246
294
 
247
295
  except Exception as e:
296
+ if self._is_model_not_found_error(e):
297
+ fallback_model = self._fallback_model(self.model)
298
+ if fallback_model and fallback_model != self.model:
299
+ logger.warning(
300
+ "Anthropic model '%s' not found. Falling back to '%s'.",
301
+ self.model,
302
+ fallback_model,
303
+ )
304
+ self.model = fallback_model
305
+ params["model"] = fallback_model
306
+ response = await self._client.messages.create(**params)
307
+ content = response.content[0].text if response.content else ""
308
+ finish_reason = response.stop_reason
309
+ usage = {
310
+ "prompt_tokens": response.usage.input_tokens if response.usage else 0,
311
+ "completion_tokens": response.usage.output_tokens if response.usage else 0,
312
+ "total_tokens": (
313
+ (response.usage.input_tokens + response.usage.output_tokens)
314
+ if response.usage else 0
315
+ ),
316
+ }
317
+ self._update_stats(usage)
318
+ return LLMResponse(
319
+ content=content,
320
+ model=response.model,
321
+ usage=usage,
322
+ finish_reason=finish_reason,
323
+ metadata={"response_id": response.id, "type": response.type},
324
+ )
248
325
  logger.error(f"Anthropic chat API call failed: {e}")
249
326
  raise
327
+
328
+ @classmethod
329
+ def _normalize_model(cls, model: str) -> str:
330
+ model_key = model.strip().lower()
331
+ return cls._MODEL_ALIASES.get(model_key, model)
332
+
333
+ @staticmethod
334
+ def _is_model_not_found_error(error: Exception) -> bool:
335
+ message = str(error).lower()
336
+ return "not_found_error" in message or "model:" in message
337
+
338
+ @staticmethod
339
+ def _fallback_model(model: str) -> Optional[str]:
340
+ model_lower = model.lower()
341
+ # Claude 4.5 fallbacks
342
+ if model_lower.startswith("claude-sonnet-4-5") or model_lower.startswith("claude-opus-4-5"):
343
+ return "claude-sonnet-4-20250514"
344
+ if model_lower.startswith("claude-haiku-4-5"):
345
+ return "claude-haiku-4-5-20251001"
346
+ # Claude 4 fallbacks
347
+ if model_lower.startswith("claude-opus-4"):
348
+ return "claude-sonnet-4-20250514"
349
+ if model_lower.startswith("claude-sonnet-4"):
350
+ return "claude-3-5-sonnet-20241022"
351
+ # Claude 3.5 fallbacks
352
+ if model_lower.startswith("claude-3-5"):
353
+ return "claude-3-sonnet-20240229"
354
+ # Claude 3 fallbacks
355
+ if model_lower.startswith("claude-3-opus"):
356
+ return "claude-3-sonnet-20240229"
357
+ if model_lower.startswith("claude-3-sonnet"):
358
+ return "claude-3-haiku-20240307"
359
+ if model_lower.startswith("claude-3-haiku"):
360
+ return "claude-3-haiku-20240307"
361
+ # Generic Claude fallback
362
+ if model_lower.startswith("claude"):
363
+ return "claude-3-haiku-20240307"
364
+ return None
@@ -4,7 +4,7 @@ import json
4
4
  import logging
5
5
  import sys
6
6
  from contextvars import ContextVar
7
- from datetime import datetime
7
+ from datetime import datetime, UTC
8
8
  from typing import Any, Dict, Optional
9
9
 
10
10
 
@@ -250,7 +250,7 @@ class StructuredLogger:
250
250
  JSON formatted log string
251
251
  """
252
252
  log_entry = {
253
- "timestamp": datetime.utcnow().isoformat(),
253
+ "timestamp": datetime.now(UTC).isoformat(),
254
254
  "level": level,
255
255
  "message": message,
256
256
  "context": {**get_log_context(), **self.context},
genxai/security/auth.py CHANGED
@@ -4,7 +4,7 @@ import secrets
4
4
  import hashlib
5
5
  import time
6
6
  from typing import Optional, List, Dict, Any
7
- from datetime import datetime, timedelta
7
+ from datetime import datetime, timedelta, UTC
8
8
  from dataclasses import dataclass
9
9
  import sqlite3
10
10
  import os
@@ -94,7 +94,7 @@ class APIKeyManager:
94
94
  # Calculate expiration
95
95
  expires_at = None
96
96
  if expires_in_days:
97
- expires_at = datetime.utcnow() + timedelta(days=expires_in_days)
97
+ expires_at = datetime.now(UTC) + timedelta(days=expires_in_days)
98
98
 
99
99
  # Store in database
100
100
  conn = sqlite3.connect(self.db_path)
@@ -103,7 +103,7 @@ class APIKeyManager:
103
103
  cursor.execute("""
104
104
  INSERT INTO api_keys (key_id, user_id, name, key_hash, created_at, expires_at)
105
105
  VALUES (?, ?, ?, ?, ?, ?)
106
- """, (key_id, user_id, name, key_hash, datetime.utcnow(), expires_at))
106
+ """, (key_id, user_id, name, key_hash, datetime.now(UTC), expires_at))
107
107
 
108
108
  conn.commit()
109
109
  conn.close()
@@ -144,14 +144,16 @@ class APIKeyManager:
144
144
  # Check expiration
145
145
  if expires_at:
146
146
  expires_dt = datetime.fromisoformat(expires_at)
147
- if datetime.utcnow() > expires_dt:
147
+ if expires_dt.tzinfo is None:
148
+ expires_dt = expires_dt.replace(tzinfo=UTC)
149
+ if datetime.now(UTC) > expires_dt:
148
150
  conn.close()
149
151
  return None
150
152
 
151
153
  # Update last used
152
154
  cursor.execute("""
153
155
  UPDATE api_keys SET last_used = ? WHERE key_id = ?
154
- """, (datetime.utcnow(), key_id))
156
+ """, (datetime.now(UTC), key_id))
155
157
 
156
158
  conn.commit()
157
159
  conn.close()
@@ -257,7 +259,9 @@ class APIKeyManager:
257
259
  expires_in_days = None
258
260
  if expires_at:
259
261
  expires_dt = datetime.fromisoformat(expires_at)
260
- expires_in_days = (expires_dt - datetime.utcnow()).days
262
+ if expires_dt.tzinfo is None:
263
+ expires_dt = expires_dt.replace(tzinfo=UTC)
264
+ expires_in_days = (expires_dt - datetime.now(UTC)).days
261
265
 
262
266
  return self.generate_key(user_id, name, expires_in_days)
263
267
 
@@ -2,7 +2,7 @@
2
2
 
3
3
  import sqlite3
4
4
  from typing import Dict, Any, Optional
5
- from datetime import datetime, timedelta
5
+ from datetime import datetime, timedelta, UTC
6
6
  from dataclasses import dataclass
7
7
  import os
8
8
 
@@ -106,7 +106,7 @@ class TokenUsageTracker:
106
106
  INSERT INTO token_usage
107
107
  (user_id, provider, model, prompt_tokens, completion_tokens, cost, timestamp)
108
108
  VALUES (?, ?, ?, ?, ?, ?, ?)
109
- """, (user_id, provider, model, prompt_tokens, completion_tokens, cost, datetime.utcnow()))
109
+ """, (user_id, provider, model, prompt_tokens, completion_tokens, cost, datetime.now(UTC)))
110
110
 
111
111
  conn.commit()
112
112
  conn.close()
@@ -126,7 +126,7 @@ class TokenUsageTracker:
126
126
  Usage statistics
127
127
  """
128
128
  # Calculate time range
129
- now = datetime.utcnow()
129
+ now = datetime.now(UTC)
130
130
  if period == "day":
131
131
  start_time = now - timedelta(days=1)
132
132
  elif period == "week":
@@ -223,7 +223,7 @@ class BudgetManager:
223
223
  conn = sqlite3.connect(self.db_path)
224
224
  cursor = conn.cursor()
225
225
 
226
- now = datetime.utcnow()
226
+ now = datetime.now(UTC)
227
227
 
228
228
  cursor.execute("""
229
229
  INSERT OR REPLACE INTO budgets (user_id, amount, period, created_at, updated_at)
@@ -447,7 +447,7 @@ class CostAlertManager:
447
447
  # Check if we already sent alert recently (within 1 hour)
448
448
  if last_alert:
449
449
  last_alert_time = datetime.fromisoformat(last_alert)
450
- if datetime.utcnow() - last_alert_time < timedelta(hours=1):
450
+ if datetime.now(UTC) - last_alert_time < timedelta(hours=1):
451
451
  conn.close()
452
452
  return
453
453
 
@@ -462,7 +462,7 @@ class CostAlertManager:
462
462
  # Update last alert time
463
463
  cursor.execute("""
464
464
  UPDATE cost_alerts SET last_alert = ? WHERE user_id = ?
465
- """, (datetime.utcnow(), user_id))
465
+ """, (datetime.now(UTC), user_id))
466
466
 
467
467
  conn.commit()
468
468
 
genxai/security/jwt.py CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  import jwt
4
4
  import os
5
- from datetime import datetime, timedelta
5
+ from datetime import datetime, timedelta, UTC
6
6
  from typing import Dict, Any, Optional
7
7
  from dataclasses import dataclass
8
8
 
@@ -50,7 +50,7 @@ class JWTManager:
50
50
  Returns:
51
51
  JWT token string
52
52
  """
53
- now = datetime.utcnow()
53
+ now = datetime.now(UTC)
54
54
  exp = now + timedelta(seconds=expires_in)
55
55
 
56
56
  payload = {
genxai/security/pii.py CHANGED
@@ -3,7 +3,7 @@
3
3
  import re
4
4
  from typing import List, Dict, Any, Optional
5
5
  from dataclasses import dataclass
6
- from datetime import datetime
6
+ from datetime import datetime, UTC
7
7
 
8
8
 
9
9
  # PII patterns
@@ -254,7 +254,7 @@ class PIIAuditLogger:
254
254
  context: Additional context
255
255
  """
256
256
  log_entry = {
257
- "timestamp": datetime.utcnow().isoformat(),
257
+ "timestamp": datetime.now(UTC).isoformat(),
258
258
  "user_id": user_id,
259
259
  "pii_type": pii_type,
260
260
  "action": action,
@@ -34,6 +34,7 @@ from genxai.tools.builtin.database.database_inspector import DatabaseInspectorTo
34
34
 
35
35
  # Communication tools
36
36
  from genxai.tools.builtin.communication.email_sender import EmailSenderTool
37
+ from genxai.tools.builtin.communication.human_input import HumanInputTool
37
38
  from genxai.tools.builtin.communication.slack_notifier import SlackNotifierTool
38
39
  from genxai.tools.builtin.communication.sms_sender import SMSSenderTool
39
40
  from genxai.tools.builtin.communication.webhook_caller import WebhookCallerTool
@@ -75,6 +76,7 @@ _tools_to_register = [
75
76
  DatabaseInspectorTool(),
76
77
  # Communication
77
78
  EmailSenderTool(),
79
+ HumanInputTool(),
78
80
  SlackNotifierTool(),
79
81
  SMSSenderTool(),
80
82
  WebhookCallerTool(),
@@ -119,6 +121,7 @@ __all__ = [
119
121
  "VectorSearchTool",
120
122
  "DatabaseInspectorTool",
121
123
  "EmailSenderTool",
124
+ "HumanInputTool",
122
125
  "SlackNotifierTool",
123
126
  "SMSSenderTool",
124
127
  "WebhookCallerTool",
@@ -0,0 +1,32 @@
1
+ """Human input tool for interactive workflows."""
2
+
3
+ from typing import Any, Dict
4
+
5
+ from genxai.tools.base import Tool, ToolMetadata, ToolParameter, ToolCategory
6
+
7
+
8
class HumanInputTool(Tool):
    """Prompt a human on the console and return the typed response."""

    def __init__(self) -> None:
        # Imported lazily to avoid a circular import at module load time.
        from genxai.tools.registry import ToolRegistry

        super().__init__(
            metadata=ToolMetadata(
                name="human_input",
                description="Collects human input from the console",
                category=ToolCategory.CUSTOM,
            ),
            parameters=[
                ToolParameter(
                    name="prompt",
                    type="string",
                    description="Prompt to show the user",
                )
            ],
        )
        # Self-register exactly once per tool name.
        if ToolRegistry.get(self.metadata.name) is None:
            ToolRegistry.register(self)

    async def _execute(self, **kwargs: Any) -> Dict[str, Any]:
        """Read one line from stdin and return it as the tool result.

        NOTE(review): input() blocks the event loop while waiting — confirm
        this is acceptable for the workflows that invoke this tool.
        """
        question = kwargs.get("prompt", "Your response:")
        return {"response": input(f"{question} ")}
@@ -0,0 +1,19 @@
1
+ """
2
+ Auto-generated tool: test-2
3
+ Description: testing
4
+ Category: custom
5
+ Created: 2026-02-03 02:05:46.667101
6
+ """
7
+
8
+ # Tool code
9
+ # Access parameters via 'params' dict
10
+ # Example: value = params.get('input_value')
11
+
12
+ # Your tool logic here
13
+ result = {
14
+ "message": "Hello from custom tool!",
15
+ "data": params
16
+ }
17
+
18
+ # Set 'result' variable with your output
19
+
@@ -0,0 +1,9 @@
1
+ """
2
+ Auto-generated tool: test_tool_ui
3
+ Description: test
4
+ Category: custom
5
+ Created: 2026-02-03 02:03:15.074097
6
+ """
7
+
8
+ # Tool code
9
+ result = {"message": "ok"}
@@ -94,7 +94,7 @@ class ToolService:
94
94
  logger.info(f"Saved tool to database: {name}")
95
95
 
96
96
  # Optionally export to file
97
- ToolService._export_to_file(tool_model)
97
+ # ToolService._export_to_file(tool_model)
98
98
 
99
99
  return tool_model
100
100
  except Exception as e:
@@ -153,7 +153,7 @@ class ToolService:
153
153
  logger.info(f"Updated tool code in database: {name}")
154
154
 
155
155
  # Update file if exists
156
- ToolService._export_to_file(tool)
156
+ # ToolService._export_to_file(tool)
157
157
 
158
158
  return True
159
159
  return False
@@ -183,7 +183,7 @@ class ToolService:
183
183
  logger.info(f"Deleted tool from database: {name}")
184
184
 
185
185
  # Delete file if exists
186
- ToolService._delete_file(name)
186
+ # ToolService._delete_file(name)
187
187
 
188
188
  return True
189
189
  return False
@@ -2,7 +2,7 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from datetime import datetime
5
+ from datetime import datetime, UTC
6
6
  from typing import Any, Dict, Optional
7
7
  import logging
8
8
 
@@ -52,7 +52,7 @@ class ScheduleTrigger(BaseTrigger):
52
52
  trigger = IntervalTrigger(seconds=self.interval_seconds)
53
53
 
54
54
  async def _emit_wrapper() -> None:
55
- await self.emit(payload={"scheduled_at": datetime.utcnow().isoformat(), **self.payload})
55
+ await self.emit(payload={"scheduled_at": datetime.now(UTC).isoformat(), **self.payload})
56
56
 
57
57
  scheduler.add_job(_emit_wrapper, trigger=trigger)
58
58
  scheduler.start()
genxai/utils/tokens.py CHANGED
@@ -17,8 +17,14 @@ MODEL_TOKEN_LIMITS: Dict[str, int] = {
17
17
  "gpt-3.5-turbo-16k": 16384,
18
18
  # Anthropic models
19
19
  "claude-3-opus": 200000,
20
+ "claude-3-opus-20240229": 200000,
20
21
  "claude-3-sonnet": 200000,
22
+ "claude-3-sonnet-20240229": 200000,
21
23
  "claude-3-haiku": 200000,
24
+ "claude-3-haiku-20240307": 200000,
25
+ "claude-3-5-sonnet": 200000,
26
+ "claude-3-5-sonnet-20241022": 200000,
27
+ "claude-3-5-sonnet-20240620": 200000,
22
28
  "claude-2.1": 200000,
23
29
  "claude-2": 100000,
24
30
  "claude-instant": 100000,