mojentic 0.7.2__tar.gz → 0.7.3__tar.gz

This diff shows the changes between package versions that have been publicly released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.
Files changed (143)
  1. {mojentic-0.7.2/src/mojentic.egg-info → mojentic-0.7.3}/PKG-INFO +1 -1
  2. {mojentic-0.7.2 → mojentic-0.7.3}/pyproject.toml +1 -1
  3. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/anthropic.py +1 -1
  4. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/llm_gateway.py +3 -1
  5. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/ollama.py +6 -0
  6. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/openai.py +5 -0
  7. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/llm_broker.py +42 -24
  8. {mojentic-0.7.2 → mojentic-0.7.3/src/mojentic.egg-info}/PKG-INFO +1 -1
  9. {mojentic-0.7.2 → mojentic-0.7.3}/LICENSE.md +0 -0
  10. {mojentic-0.7.2 → mojentic-0.7.3}/README.md +0 -0
  11. {mojentic-0.7.2 → mojentic-0.7.3}/setup.cfg +0 -0
  12. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/__init__.py +0 -0
  13. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/async_dispatcher_example.py +0 -0
  14. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/async_llm_example.py +0 -0
  15. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/broker_as_tool.py +0 -0
  16. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/broker_examples.py +0 -0
  17. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/broker_image_examples.py +0 -0
  18. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/characterize_ollama.py +0 -0
  19. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/characterize_openai.py +0 -0
  20. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/chat_session.py +0 -0
  21. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/chat_session_with_tool.py +0 -0
  22. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/coding_file_tool.py +0 -0
  23. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/current_datetime_tool_example.py +0 -0
  24. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/design_analysis.py +0 -0
  25. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/embeddings.py +0 -0
  26. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/ensures_files_exist.py +0 -0
  27. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/ephemeral_task_manager_example.py +0 -0
  28. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/file_deduplication.py +0 -0
  29. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/file_tool.py +0 -0
  30. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/image_analysis.py +0 -0
  31. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/image_broker.py +0 -0
  32. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/image_broker_splat.py +0 -0
  33. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/iterative_solver.py +0 -0
  34. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/list_models.py +0 -0
  35. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/oversized_embeddings.py +0 -0
  36. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/raw.py +0 -0
  37. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/__init__.py +0 -0
  38. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/agents/__init__.py +0 -0
  39. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/agents/decisioning_agent.py +0 -0
  40. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/agents/thinking_agent.py +0 -0
  41. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/formatters.py +0 -0
  42. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/models/__init__.py +0 -0
  43. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/models/base.py +0 -0
  44. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react/models/events.py +0 -0
  45. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/react.py +0 -0
  46. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/recursive_agent.py +0 -0
  47. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/routed_send_response.py +0 -0
  48. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/simple_llm.py +0 -0
  49. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/simple_llm_repl.py +0 -0
  50. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/simple_structured.py +0 -0
  51. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/simple_tool.py +0 -0
  52. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/solver_chat_session.py +0 -0
  53. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/streaming.py +0 -0
  54. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/tell_user_example.py +0 -0
  55. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/tracer_demo.py +0 -0
  56. {mojentic-0.7.2 → mojentic-0.7.3}/src/_examples/working_memory.py +0 -0
  57. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/__init__.py +0 -0
  58. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/__init__.py +0 -0
  59. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/agent_broker.py +0 -0
  60. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/async_aggregator_agent.py +0 -0
  61. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/async_aggregator_agent_spec.py +0 -0
  62. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/async_llm_agent.py +0 -0
  63. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/async_llm_agent_spec.py +0 -0
  64. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/base_agent.py +0 -0
  65. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/base_async_agent.py +0 -0
  66. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/base_llm_agent.py +0 -0
  67. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/base_llm_agent_spec.py +0 -0
  68. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/correlation_aggregator_agent.py +0 -0
  69. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/iterative_problem_solver.py +0 -0
  70. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/output_agent.py +0 -0
  71. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/agents/simple_recursive_agent.py +0 -0
  72. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/async_dispatcher.py +0 -0
  73. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/async_dispatcher_spec.py +0 -0
  74. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/context/__init__.py +0 -0
  75. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/context/shared_working_memory.py +0 -0
  76. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/dispatcher.py +0 -0
  77. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/event.py +0 -0
  78. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/__init__.py +0 -0
  79. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/chat_session.py +0 -0
  80. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/chat_session_spec.py +0 -0
  81. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/__init__.py +0 -0
  82. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/anthropic_messages_adapter.py +0 -0
  83. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/embeddings_gateway.py +0 -0
  84. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/file_gateway.py +0 -0
  85. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/models.py +0 -0
  86. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/ollama_messages_adapter.py +0 -0
  87. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/ollama_messages_adapter_spec.py +0 -0
  88. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/openai_message_adapter_spec.py +0 -0
  89. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/openai_messages_adapter.py +0 -0
  90. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/tokenizer_gateway.py +0 -0
  91. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/llm_broker_spec.py +0 -0
  92. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/message_composers.py +0 -0
  93. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/message_composers_spec.py +0 -0
  94. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/registry/__init__.py +0 -0
  95. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/registry/llm_registry.py +0 -0
  96. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/registry/models.py +0 -0
  97. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/registry/populate_registry_from_ollama.py +0 -0
  98. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/__init__.py +0 -0
  99. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ask_user_tool.py +0 -0
  100. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/current_datetime.py +0 -0
  101. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/date_resolver.py +0 -0
  102. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/date_resolver_spec.py +0 -0
  103. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/__init__.py +0 -0
  104. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/append_task_tool.py +0 -0
  105. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/append_task_tool_spec.py +0 -0
  106. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/clear_tasks_tool.py +0 -0
  107. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/clear_tasks_tool_spec.py +0 -0
  108. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/complete_task_tool.py +0 -0
  109. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/complete_task_tool_spec.py +0 -0
  110. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/ephemeral_task_list.py +0 -0
  111. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/ephemeral_task_list_spec.py +0 -0
  112. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/insert_task_after_tool.py +0 -0
  113. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/insert_task_after_tool_spec.py +0 -0
  114. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/list_tasks_tool.py +0 -0
  115. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/list_tasks_tool_spec.py +0 -0
  116. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/prepend_task_tool.py +0 -0
  117. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/prepend_task_tool_spec.py +0 -0
  118. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/start_task_tool.py +0 -0
  119. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/ephemeral_task_manager/start_task_tool_spec.py +0 -0
  120. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/file_manager.py +0 -0
  121. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/file_manager_spec.py +0 -0
  122. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/llm_tool.py +0 -0
  123. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/llm_tool_spec.py +0 -0
  124. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/organic_web_search.py +0 -0
  125. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/tell_user_tool.py +0 -0
  126. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/tool_wrapper.py +0 -0
  127. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/tools/tool_wrapper_spec.py +0 -0
  128. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/router.py +0 -0
  129. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/router_spec.py +0 -0
  130. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/__init__.py +0 -0
  131. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/event_store.py +0 -0
  132. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/event_store_spec.py +0 -0
  133. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/null_tracer.py +0 -0
  134. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/tracer_events.py +0 -0
  135. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/tracer_events_spec.py +0 -0
  136. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/tracer_system.py +0 -0
  137. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/tracer/tracer_system_spec.py +0 -0
  138. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/utils/__init__.py +0 -0
  139. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/utils/formatting.py +0 -0
  140. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic.egg-info/SOURCES.txt +0 -0
  141. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic.egg-info/dependency_links.txt +0 -0
  142. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic.egg-info/requires.txt +0 -0
  143. {mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic.egg-info/top_level.txt +0 -0
{mojentic-0.7.2/src/mojentic.egg-info → mojentic-0.7.3}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mojentic
- Version: 0.7.2
+ Version: 0.7.3
  Summary: Mojentic is an agentic framework that aims to provide a simple and flexible way to assemble teams of agents to solve complex problems.
  Author-email: Stacey Vetzal <stacey@vetzal.com>
  Project-URL: Homepage, https://github.com/svetzal/mojentic
{mojentic-0.7.2 → mojentic-0.7.3}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "mojentic"
- version = "0.7.2"
+ version = "0.7.3"
  authors = [
  { name = "Stacey Vetzal", email = "stacey@vetzal.com" },
  ]
{mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/anthropic.py
@@ -29,7 +29,7 @@ class AnthropicGateway(LLMGateway):
  response = self.client.messages.create(
  **anthropic_args,
  temperature=args.get('temperature', 1.0),
- max_tokens=args.get('num_predict', 2000),
+ max_tokens=args.get('max_tokens', args.get('num_predict', 2000)),
  # thinking={
  # "type": "enabled",
  # "budget_tokens": 32768,
{mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/llm_gateway.py
@@ -19,7 +19,7 @@ class LLMGateway:
  object_model: Optional[Type[BaseModel]] = None,
  tools: Optional[List[LLMTool]] = None,
  temperature: float = 1.0,
- num_ctx: int = 32768,
+ num_ctx: int = 32768, max_tokens: int = 16384,
  num_predict: int = -1) -> LLMGatewayResponse:
  """
  Complete the LLM request.
@@ -39,6 +39,8 @@ class LLMGateway:
  The temperature to use for the response. Defaults to 1.0.
  num_ctx : int
  The number of context tokens to use. Defaults to 32768.
+ max_tokens : int
+ The maximum number of tokens to generate. Defaults to 16384.
  num_predict : int
  The number of tokens to predict. Defaults to no limit.

{mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/ollama.py
@@ -35,6 +35,8 @@ class OllamaGateway(LLMGateway):
  )
  if args.get('num_predict', 0) > 0:
  options.num_predict = args['num_predict']
+ if 'max_tokens' in args:
+ options.num_predict = args['max_tokens']
  return options

  def complete(self, **args) -> LLMGatewayResponse:
@@ -56,6 +58,8 @@ class OllamaGateway(LLMGateway):
  The temperature to use for the response. Defaults to 1.0.
  num_ctx : int, optional
  The number of context tokens to use. Defaults to 32768.
+ max_tokens : int, optional
+ The maximum number of tokens to generate. Defaults to 16384.
  num_predict : int, optional
  The number of tokens to predict. Defaults to no limit.

@@ -120,6 +124,8 @@ class OllamaGateway(LLMGateway):
  The temperature to use for the response. Defaults to 1.0.
  num_ctx : int, optional
  The number of context tokens to use. Defaults to 32768.
+ max_tokens : int, optional
+ The maximum number of tokens to generate. Defaults to 16384.
  num_predict : int, optional
  The number of tokens to predict. Defaults to no limit.

{mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/gateways/openai.py
@@ -46,6 +46,8 @@ class OpenAIGateway(LLMGateway):
  The temperature to use for the response. Defaults to 1.0.
  num_ctx : int, optional
  The number of context tokens to use. Defaults to 32768.
+ max_tokens : int, optional
+ The maximum number of tokens to generate. Defaults to 16384.
  num_predict : int, optional
  The number of tokens to predict. Defaults to no limit.

@@ -68,6 +70,9 @@ class OpenAIGateway(LLMGateway):
  if 'tools' in args and args['tools'] is not None:
  openai_args['tools'] = [t.descriptor for t in args['tools']]

+ if 'max_tokens' in args:
+ openai_args['max_tokens'] = args['max_tokens']
+
  response = completion(**openai_args)

  object = None
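Taken together, the gateway hunks above thread a single max_tokens argument through to each backend's native output cap. The sketch below condenses that mapping for illustration only; it is not part of the package, and the standalone helper names are invented.

# Illustrative sketch only (not package code): how the max_tokens keyword
# introduced in 0.7.3 is mapped by each gateway, condensed from the hunks above.

def anthropic_output_cap(args: dict) -> int:
    # Anthropic: prefer max_tokens, fall back to num_predict, then 2000.
    return args.get('max_tokens', args.get('num_predict', 2000))

def ollama_num_predict(args: dict, current: int = -1) -> int:
    # Ollama: a positive num_predict is applied first, then max_tokens overrides it.
    if args.get('num_predict', 0) > 0:
        current = args['num_predict']
    if 'max_tokens' in args:
        current = args['max_tokens']
    return current

def openai_request_args(args: dict, openai_args: dict) -> dict:
    # OpenAI: max_tokens is copied straight onto the completion request.
    if 'max_tokens' in args:
        openai_args['max_tokens'] = args['max_tokens']
    return openai_args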
{mojentic-0.7.2 → mojentic-0.7.3}/src/mojentic/llm/llm_broker.py
@@ -5,18 +5,19 @@ from typing import List, Optional, Type
  import structlog
  from pydantic import BaseModel

- from mojentic.tracer.tracer_system import TracerSystem
  from mojentic.llm.gateways.llm_gateway import LLMGateway
  from mojentic.llm.gateways.models import MessageRole, LLMMessage, LLMGatewayResponse
  from mojentic.llm.gateways.ollama import OllamaGateway
  from mojentic.llm.gateways.tokenizer_gateway import TokenizerGateway
+ from mojentic.tracer.tracer_system import TracerSystem

  logger = structlog.get_logger()


  class LLMBroker():
  """
- This class is responsible for managing interaction with a Large Language Model. It abstracts the user
+ This class is responsible for managing interaction with a Large Language Model. It abstracts
+ the user
  from the specific mechanics of the LLM and provides a common interface for generating responses.
  """

@@ -25,7 +26,8 @@ class LLMBroker():
  model: str
  tracer: Optional[TracerSystem]

- def __init__(self, model: str, gateway: Optional[LLMGateway] = None, tokenizer: Optional[TokenizerGateway] = None,
+ def __init__(self, model: str, gateway: Optional[LLMGateway] = None,
+ tokenizer: Optional[TokenizerGateway] = None,
  tracer: Optional[TracerSystem] = None):
  """
  Create an instance of the LLMBroker.
@@ -35,10 +37,12 @@ class LLMBroker():
  model
  The name of the model to use.
  gateway
- The gateway to use for communication with the LLM. If None, a gateway is created that will utilize a local
+ The gateway to use for communication with the LLM. If None, a gateway is created that
+ will utilize a local
  Ollama server.
  tokenizer
- The gateway to use for tokenization. This is used to log approximate token counts for the LLM calls. If
+ The gateway to use for tokenization. This is used to log approximate token counts for
+ the LLM calls. If
  None, `mxbai-embed-large` is used on a local Ollama server.
  tracer
  Optional tracer system to record LLM calls and responses.
@@ -58,8 +62,9 @@ class LLMBroker():
  else:
  self.adapter = gateway

- def generate(self, messages: List[LLMMessage], tools=None, temperature=1.0, num_ctx=32768, num_predict=-1,
- correlation_id: str = None) -> str:
+ def generate(self, messages: List[LLMMessage], tools=None, temperature=1.0, num_ctx=32768,
+ num_predict=-1, max_tokens=16384,
+ correlation_id: str = None) -> str:
  """
  Generate a text response from the LLM.

@@ -68,7 +73,8 @@ class LLMBroker():
  messages : LLMMessage
  A list of messages to send to the LLM.
  tools : List[Tool]
- A list of tools to use with the LLM. If a tool call is requested, the tool will be called and the output
+ A list of tools to use with the LLM. If a tool call is requested, the tool will be
+ called and the output
  will be included in the response.
  temperature : float
  The temperature to use for the response. Defaults to 1.0
@@ -91,10 +97,11 @@ class LLMBroker():
  messages_for_tracer = [m.model_dump() for m in messages]

  # Record LLM call in tracer
- tools_for_tracer = [{"name": t.name, "description": t.description} for t in tools] if tools else None
+ tools_for_tracer = [{"name": t.name, "description": t.description} for t in
+ tools] if tools else None
  self.tracer.record_llm_call(
- self.model,
- messages_for_tracer,
+ self.model,
+ messages_for_tracer,
  temperature,
  tools=tools_for_tracer,
  source=type(self),
@@ -110,12 +117,14 @@ class LLMBroker():
  tools=tools,
  temperature=temperature,
  num_ctx=num_ctx,
- num_predict=num_predict)
+ num_predict=num_predict,
+ max_tokens=max_tokens)

  call_duration_ms = (time.time() - start_time) * 1000

  # Record LLM response in tracer
- tool_calls_for_tracer = [tc.model_dump() for tc in result.tool_calls] if result.tool_calls else None
+ tool_calls_for_tracer = [tc.model_dump() for tc in
+ result.tool_calls] if result.tool_calls else None
  self.tracer.record_llm_response(
  self.model,
  result.content,
@@ -153,13 +162,17 @@ class LLMBroker():
  logger.info('Function output', output=output)
  messages.append(LLMMessage(role=MessageRole.Assistant, tool_calls=[tool_call]))
  messages.append(
- LLMMessage(role=MessageRole.Tool, content=json.dumps(output), tool_calls=[tool_call]))
- # {'role': 'tool', 'content': str(output), 'name': tool_call.name, 'tool_call_id': tool_call.id})
- return self.generate(messages, tools, temperature, num_ctx, num_predict, correlation_id=correlation_id)
+ LLMMessage(role=MessageRole.Tool, content=json.dumps(output),
+ tool_calls=[tool_call]))
+ # {'role': 'tool', 'content': str(output), 'name': tool_call.name,
+ # 'tool_call_id': tool_call.id})
+ return self.generate(messages, tools, temperature, num_ctx, num_predict,
+ correlation_id=correlation_id)
  else:
  logger.warn('Function not found', function=tool_call.name)
  logger.info('Expected usage of missing function', expected_usage=tool_call)
- # raise Exception('Unknown tool function requested:', requested_tool.function.name)
+ # raise Exception('Unknown tool function requested:',
+ # requested_tool.function.name)

  return result.content

@@ -170,8 +183,9 @@ class LLMBroker():
  content += message.content
  return content

- def generate_object(self, messages: List[LLMMessage], object_model: Type[BaseModel], temperature=1.0, num_ctx=32768,
- num_predict=-1, correlation_id: str = None) -> BaseModel:
+ def generate_object(self, messages: List[LLMMessage], object_model: Type[BaseModel],
+ temperature=1.0, num_ctx=32768, num_predict=-1, max_tokens=16384,
+ correlation_id: str = None) -> BaseModel:
  """
  Generate a structured response from the LLM and return it as an object.

@@ -203,8 +217,8 @@

  # Record LLM call in tracer
  self.tracer.record_llm_call(
- self.model,
- messages_for_tracer,
+ self.model,
+ messages_for_tracer,
  temperature,
  tools=None,
  source=type(self),
@@ -214,14 +228,18 @@
  # Measure call duration for audit
  start_time = time.time()

- result = self.adapter.complete(model=self.model, messages=messages, object_model=object_model,
- temperature=temperature, num_ctx=num_ctx, num_predict=num_predict)
+ result = self.adapter.complete(model=self.model, messages=messages,
+ object_model=object_model,
+ temperature=temperature, num_ctx=num_ctx,
+ num_predict=num_predict, max_tokens=max_tokens)

  call_duration_ms = (time.time() - start_time) * 1000

  # Record LLM response in tracer with object representation
  # Convert object to string for tracer
- object_str = str(result.object.model_dump()) if hasattr(result.object, "model_dump") else str(result.object)
+ object_str = str(result.object.model_dump()) if hasattr(result.object,
+ "model_dump") else str(
+ result.object)
  self.tracer.record_llm_response(
  self.model,
  f"Structured response: {object_str}",
{mojentic-0.7.2 → mojentic-0.7.3/src/mojentic.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mojentic
- Version: 0.7.2
+ Version: 0.7.3
  Summary: Mojentic is an agentic framework that aims to provide a simple and flexible way to assemble teams of agents to solve complex problems.
  Author-email: Stacey Vetzal <stacey@vetzal.com>
  Project-URL: Homepage, https://github.com/svetzal/mojentic