autonomous-app 0.3.33__py3-none-any.whl → 0.3.34__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
autonomous/ai/models/local_model.py CHANGED
@@ -22,16 +22,11 @@ class LocalAIModel(AutoModel):
     description = StringAttr(default="A Local AI Model using Ollama and Media AI.")

     # Config
-    _ollama_url = os.environ.get("OLLAMA_API_BASE", "http://ollama:11434/api")
-    _media_url = os.environ.get("MEDIA_API_BASE", "http://media_ai:5005")
+    _ollama_url = os.environ.get("OLLAMA_API_BASE_URL", "INVALID")
+    _media_url = os.environ.get("MEDIA_API_BASE_URL", "INVALID")
     _text_model = "llama3"
     _json_model = "llama3"

-    # DB Connections
-    _mongo_client = pymongo.MongoClient("mongodb://db:27017/")
-    _mongo_db = os.getenv("DB_DB", "default")
-    _redis = redis.Redis(host="cachedb", port=6379, decode_responses=True)
-
     VOICES = {
         "Zephyr": ["female"],
         "Puck": ["male"],
@@ -73,132 +68,70 @@ class LocalAIModel(AutoModel):
         }
         return json.dumps(schema, indent=2)

-    def get_embedding(self, text):
-        try:
-            res = requests.post(f"{self._media_url}/embeddings", json={"text": text})
-            res.raise_for_status()
-            return res.json()["embedding"]
-        except Exception as e:
-            log(f"Embedding Error: {e}", _print=True)
-            return []
-
-    def build_hybrid_context(self, prompt, focus_object_id=None):
-        cache_key = f"ctx:{focus_object_id}:{len(prompt) // 50}"
-        cached_ctx = self._redis.get(cache_key)
-        if cached_ctx:
-            return cached_ctx
-
-        context_str = ""
-        # --- PART 1: MONGODB ---
-        if focus_object_id:
-            try:
-                oid = (
-                    ObjectId(focus_object_id)
-                    if isinstance(focus_object_id, str)
-                    else focus_object_id
-                )
-                main_obj = self._mongo_db.objects.find_one({"_id": oid})
-
-                if main_obj:
-                    context_str += "### FOCUS OBJECT ###\n" + prompt
-                    ref_ids = main_obj.get("associations", []) or []
-                    if world_id := main_obj.get("world"):
-                        ref_ids.append(world_id)
-                    ref_ids.extend(main_obj.get("stories", []) or [])
-                    ref_ids.extend(main_obj.get("events", []) or [])
-
-                    if ref_ids:
-                        valid_oids = [
-                            ObjectId(rid) if isinstance(rid, str) else rid
-                            for rid in ref_ids
-                        ]
-                        if valid_oids:
-                            associated_objs = self._mongo_db.objects.find(
-                                {"_id": {"$in": valid_oids}}
-                            )
-                            context_str += "\n### ASSOCIATED REFERENCES ###\n"
-                            for obj in associated_objs:
-                                context_str += f"- {obj}\n"
-                            context_str += "\n"
-            except Exception as e:
-                log(f"Mongo Association Error: {e}", _print=True)
-
-        # --- PART 2: REDIS ---
-        if len(prompt) > 10:
-            vector = self.get_embedding(prompt)
-            if vector:
-                try:
-                    q = "*=>[KNN 2 @vector $blob AS score]"
-                    params = {"blob": np.array(vector, dtype=np.float32).tobytes()}
-                    results = self._redis.ft("search_index").search(
-                        q, query_params=params
-                    )
-                    if results.docs:
-                        context_str += "### RELEVANT LORE ###\n"
-                        for doc in results.docs:
-                            context_str += f"- {doc.content}\n"
-                except Exception:
-                    pass
-
-        self._redis.set(cache_key, context_str, ex=120)
-        return context_str
-
-    def generate_json(self, message, function, additional_instructions="", **kwargs):
-        """
-        UPDATED: Uses correct /api/chat payload structure (messages list)
-        """
+    def generate_json(
+        self, message, function, additional_instructions="", uri="", context={}
+    ):
         schema_str = self._convert_tools_to_json_schema(function)
-        focus_pk = kwargs.get("focus_object")
-        world_context = self.build_hybrid_context(message, focus_object_id=focus_pk)
-
+        # 1. Base System Prompt
         full_system_prompt = (
             f"{self.instructions}. {additional_instructions}\n"
             f"You must respond strictly with a valid JSON object matching this schema:\n"
             f"{schema_str}\n"
             f"Do not include markdown formatting or explanations."
-            f"You must strictly adhere to the following context:\n"
-            f"{world_context}"
+            f"You must strictly adhere to the following context:\n {json.dumps(context, indent=2)}"
+            if context
+            else f"Use the following URI for reference: {uri}"
+            if uri
+            else ""
         )

-        # FIX: Using 'messages' instead of 'prompt'/'system'
+        # 3. Send to Ollama
         payload = {
-            "model": "llama3",
+            "model": self._json_model,
             "messages": [
                 {"role": "system", "content": full_system_prompt},
                 {"role": "user", "content": message},
             ],
-            "format": "json",
             "stream": False,
             "keep_alive": "24h",
         }

         try:
+            print(
+                f"==== {self._ollama_url}: LocalAI JSON Payload: {json.dumps(payload, indent=2)} ===="
+            )
             response = requests.post(f"{self._ollama_url}/chat", json=payload)
             response.raise_for_status()

             # FIX: Chat API returns 'message' -> 'content'
             result_text = response.json().get("message", {}).get("content", "{}")
-            return json.loads(result_text)
+            # If the tool returns a wrapper, unwrap it!
+            result_dict = json.loads(result_text)
+            if "parameters" in result_dict and isinstance(
+                result_dict["parameters"], dict
+            ):
+                params = result_dict.pop("parameters")
+                result_dict.update(params)
+            return result_dict

         except Exception as e:
             log(f"==== LocalAI JSON Error: {e} ====", _print=True)
             return {}

-    def generate_text(self, message, additional_instructions="", **kwargs):
-        """
-        UPDATED: Uses correct /api/chat payload structure
-        """
-        focus_pk = kwargs.get("focus_object")
-        world_context = self.build_hybrid_context(message, focus_object_id=focus_pk)
-
+    def generate_text(self, message, additional_instructions="", uri="", context={}):
+        # 1. Base System Prompt
         full_system_prompt = (
             f"{self.instructions}. {additional_instructions}\n"
-            f"You must strictly adhere to the following context:\n"
-            f"{world_context}"
+            f"You must strictly adhere to the following context:\n {json.dumps(context, indent=2)}"
+            if context
+            else f"Use the following URI for reference: {uri}"
+            if uri
+            else ""
         )

+        # 3. Send to Ollama
         payload = {
-            "model": "llama3",
+            "model": self._text_model,
             "messages": [
                 {"role": "system", "content": full_system_prompt},
                 {"role": "user", "content": message},
@@ -216,7 +149,7 @@ class LocalAIModel(AutoModel):
             log(f"==== LocalAI Text Error: {e} ====", _print=True)
             return "Error generating text."

-    def summarize_text(self, text, primer="", **kwargs):
+    def summarize_text(self, text, primer=""):
         primer = primer or "Summarize the following text concisely."
         max_chars = 12000
         chunks = [text[i : i + max_chars] for i in range(0, len(text), max_chars)]
@@ -238,7 +171,7 @@ class LocalAIModel(AutoModel):

         return full_summary

-    def generate_audio_text(self, audio_file, prompt="", **kwargs):
+    def generate_transcription(self, audio_file, prompt=""):
         try:
             if isinstance(audio_file, bytes):
                 f_obj = io.BytesIO(audio_file)
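
Note: generate_audio_text is renamed to generate_transcription and drops **kwargs; it still accepts raw bytes or a file-like object. A short sketch, assuming a default-constructed model and a local audio file (illustrative):

# Illustrative caller for the renamed speech-to-text method.
from autonomous.ai.models.local_model import LocalAIModel

with open("clip.wav", "rb") as fh:
    audio_bytes = fh.read()

model = LocalAIModel()
transcript = model.generate_transcription(audio_bytes, prompt="Meeting notes")
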
@@ -252,7 +185,7 @@ class LocalAIModel(AutoModel):
             log(f"STT Error: {e}", _print=True)
             return ""

-    def generate_audio(self, prompt, voice=None, **kwargs):
+    def generate_audio(self, prompt, voice=None):
         voice = voice or random.choice(list(self.VOICES.keys()))
         try:
             payload = {"text": prompt, "voice": voice}
@@ -267,21 +200,26 @@ class LocalAIModel(AutoModel):
             log(f"TTS Error: {e}", _print=True)
             return None

-    def generate_image(self, prompt, negative_prompt="", **kwargs):
+    def generate_image(
+        self,
+        prompt,
+        negative_prompt="",
+        files=None,
+        aspect_ratio="3:4",
+        image_size="2K",
+    ):
         try:
             data = {"prompt": prompt, "negative_prompt": negative_prompt}
-            files = {}
-            if kwargs.get("files"):
-                for fn, f_bytes in kwargs.get("files").items():
+            img_files = {}
+            if files:
+                for fn, f_bytes in files.items():
                     if isinstance(f_bytes, bytes):
                         file_obj = io.BytesIO(f_bytes)
                     else:
                         file_obj = f_bytes
-                    files["file"] = (fn, file_obj, "image/png")
-                    break
-            if files:
+                    img_files["file"] = (fn, file_obj, "image/png")
                 response = requests.post(
-                    f"{self._media_url}/generate-image", data=data, files=files
+                    f"{self._media_url}/generate-image", data=data, files=img_files
                 )
             else:
                 response = requests.post(f"{self._media_url}/generate-image", data=data)
autonomous/ai/textagent.py CHANGED
@@ -1,13 +1,15 @@
+import os
+
 from autonomous import log
 from autonomous.ai.baseagent import BaseAgent
-from autonomous.model.autoattr import ReferenceAttr, StringAttr
-from autonomous.model.automodel import AutoModel
-
-from .models.openai import OpenAIModel
+from autonomous.model.autoattr import StringAttr


 class TextAgent(BaseAgent):
     name = StringAttr(default="textagent")
+
+    # Force this agent to use Gemini
+    provider = StringAttr(default="gemini")
     instructions = StringAttr(
         default="You are highly skilled AI trained to assist with generating text according to the given requirements."
     )
@@ -15,10 +17,12 @@ class TextAgent(BaseAgent):
         default="A helpful AI assistant trained to assist with generating text according to the given requirements."
     )

-    def summarize_text(self, text, primer="", **kwargs):
-        return self.get_client().summarize_text(text, primer, **kwargs)
+    def summarize_text(self, text, primer=""):
+        return self.get_client(
+            os.environ.get("SUMMARY_AI_AGENT", self.provider)
+        ).summarize_text(text, primer=primer)

-    def generate(self, messages, additional_instructions="", **kwargs):
-        return self.get_client().generate_text(
-            messages, additional_instructions, **kwargs
-        )
+    def generate(self, message, additional_instructions="", uri="", context=""):
+        return self.get_client(
+            os.environ.get("TEXT_AI_AGENT", self.provider)
+        ).generate_text(message, additional_instructions, uri=uri, context=context)
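
Note: TextAgent now defaults to the "gemini" provider and lets SUMMARY_AI_AGENT / TEXT_AI_AGENT pick the client per call. A minimal sketch, assuming TextAgent can be constructed directly and that get_client accepts a provider name; the "local" key is a hypothetical value, since the accepted names are defined in BaseAgent.get_client, which is not shown in this diff:

# Illustrative: route summaries to a different provider while generate() keeps the default.
import os

os.environ["SUMMARY_AI_AGENT"] = "local"  # hypothetical provider key

from autonomous.ai.textagent import TextAgent

agent = TextAgent()
summary = agent.summarize_text("A long passage of prose to condense.", primer="One paragraph.")
reply = agent.generate("Write a short greeting.", context={"tone": "friendly"})
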
autonomous/db/db_sync.py CHANGED
@@ -8,19 +8,20 @@ import numpy as np
 import pymongo
 import redis
 import requests
+
 from autonomous.taskrunner.autotasks import AutoTasks, TaskPriority
+
 # CONFIGURATION
 db_host = os.getenv("DB_HOST", "db")
 db_port = os.getenv("DB_PORT", 27017)
 password = urllib.parse.quote_plus(str(os.getenv("DB_PASSWORD")))
 username = urllib.parse.quote_plus(str(os.getenv("DB_USERNAME")))
-MEDIA_URL = "http://media_ai_internal:5005"
-REDIS_HOST = os.getenv("REDIS_HOST", "cachedb")
 MONGO_URI = f"mongodb://{username}:{password}@{db_host}:{db_port}/?authSource=admin"
-
+MEDIA_URL = os.getenv("MEDIA_API_BASE_URL", "http://media_ai:5005")
+REDIS_HOST = os.getenv("REDIS_HOST", "cachedb")
+REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
 # DB SETUP
-r = redis.Redis(host=REDIS_HOST, port=6379, decode_responses=True)
-
+r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
 mongo = pymongo.MongoClient(MONGO_URI)
 db = mongo[os.getenv("DB_DB")]
 # connect(host=f"mongodb://{username}:{password}@{host}:{port}/{dbname}?authSource=admin")
@@ -131,7 +132,7 @@ def request_indexing(object_id, collection_name):
             object_id=str_id,
             collection_name=collection_name,
             token=current_token,
-            priority=TaskPriority.LOW
+            priority=TaskPriority.LOW,
         )
         return True
     except Exception as e:
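
Note: db_sync now reads its media endpoint and Redis port from the environment as well. A sketch of the variables the module consumes after this change; the MEDIA_API_BASE_URL, REDIS_HOST, and REDIS_PORT values mirror the defaults above, while the DB_* values are placeholders:

# Illustrative environment for db_sync; set before the module is imported,
# since the Mongo and Redis clients are created at import time.
import os

os.environ.setdefault("DB_HOST", "db")
os.environ.setdefault("DB_PORT", "27017")
os.environ.setdefault("DB_USERNAME", "app")        # placeholder
os.environ.setdefault("DB_PASSWORD", "change-me")  # placeholder
os.environ.setdefault("DB_DB", "autonomous")       # placeholder database name
os.environ.setdefault("MEDIA_API_BASE_URL", "http://media_ai:5005")
os.environ.setdefault("REDIS_HOST", "cachedb")
os.environ.setdefault("REDIS_PORT", "6379")
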
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: autonomous-app
-Version: 0.3.33
+Version: 0.3.34
 Summary: Containerized application framework built on Flask with additional libraries and tools for rapid development of web applications.
 Author-email: Steven A Moore <samoore@binghamton.edu>
 Project-URL: homepage, https://github.com/Sallenmoore/autonomous
@@ -1,18 +1,15 @@
-autonomous/__init__.py,sha256=0l5vmpCKXV4HwXo_U4svrh1UwjkhyKyloF5T_WIdCTY,95
+autonomous/__init__.py,sha256=vMMIVR0EbFnjlGGd4iXpgSCJ6zFYv6GjJenp8hJ1aQA,95
 autonomous/cli.py,sha256=z4AaGeWNW_uBLFAHng0J_lfS9v3fXemK1PeT85u4Eo4,42
 autonomous/logger.py,sha256=NQtgEaTWNAWfLSgqSP7ksXj1GpOuCgoUV711kSMm-WA,2022
 autonomous/ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autonomous/ai/audioagent.py,sha256=SvPLzKgqUnrkcsR7y93aURSrStIrryuntQMPS1SzUXw,1033
-autonomous/ai/baseagent.py,sha256=d6OYOk8LGHlDPNcqDmEX2PpWeerl-bIBL73IZ_T78oU,880
-autonomous/ai/imageagent.py,sha256=bIOrgg_CM-rgfyLme7V9vPqP8WKVMIAVoB2E9lLtIRk,521
-autonomous/ai/jsonagent.py,sha256=VQGhK0RFo0H_eVH9dAyf4_lp-RIpdgH988joLoKjm94,1065
-autonomous/ai/textagent.py,sha256=1yM1aMvws64PocvG_L-POMDKjxq2JDuGqgc3haUHybU,926
+autonomous/ai/audioagent.py,sha256=HEXhqMTNaVZdpok0frhvHhSFQ1q1od-HTeuDpeXdzag,1131
+autonomous/ai/baseagent.py,sha256=icOPygr1NdH64u1ZYbwHHywYIY1ZtaLY9HtfNmUbx4k,4702
+autonomous/ai/imageagent.py,sha256=yN-Qv1QSBsFvw0fVXLVqdh_3cveRETJGILgeBI3GRoc,955
+autonomous/ai/jsonagent.py,sha256=OZeQthp5WOSCV6pmbPfPQRjARkvbK5lk7A0QTEPUrUk,1228
+autonomous/ai/textagent.py,sha256=0y2Hvb9pup1OnsA51hGPcD8yllZOZtztDLQvCNYABaw,1043
 autonomous/ai/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autonomous/ai/models/aws.py,sha256=bGDjnGTm350zOqor9IsICzUkBUN2bubGI_ZssQuSXIw,12715
-autonomous/ai/models/deepseek.py,sha256=fkoi-hJp60yFlZ9Cb9PdUrmNSErYltQ5ezkUI75llXc,2734
-autonomous/ai/models/gemini.py,sha256=jrTMbh8SAdzzz27elOhs82iwjyutYcy8fvTOSdW-GFQ,14247
-autonomous/ai/models/local_model.py,sha256=jWLTHBLb-6GtrMeDNctUTucX7HFZfla5NsfGfFsNPJ0,11297
-autonomous/ai/models/openai.py,sha256=2-LttCm6woGklaLbs1H5LjlbfM-7leDwGmC9vksSqW4,13135
+autonomous/ai/models/gemini.py,sha256=eu48gywNFpUFaqBt-4MFX2oRM5IED9rUTgtavM_HRG0,14468
+autonomous/ai/models/local_model.py,sha256=VXLtEFM3QrL6wdVxORF0ZHP1gl3_FHiEC3MRgv5WNQA,8526
 autonomous/apis/version_control/GHCallbacks.py,sha256=AyiUlYfV5JePi11GVyqYyXoj5UTbPKzS-HRRI94rjJo,1069
 autonomous/apis/version_control/GHOrganization.py,sha256=mi2livdsGurKiifbvuLwiFbdDzL77IlEfhwEa-tG77I,1155
 autonomous/apis/version_control/GHRepo.py,sha256=hTFHMkxSbSlVELfh8S6mq6ijkIKPRQO-Q5775ZjRKD4,4622
@@ -27,7 +24,7 @@ autonomous/db/__init__.py,sha256=2mNynmYV0I_J3-W4Aw1cojAQrHf4aHZT1Ow9xUdmM18,115
 autonomous/db/common.py,sha256=BUN2x_XuQBRFcq54TGPx4yLMLJdgytdbIt07QWr4CSM,2551
 autonomous/db/connection.py,sha256=j_-eMre4ade9Y8GejJcMbQQiSEimL4j2vIQxaXViKxI,17754
 autonomous/db/context_managers.py,sha256=_nH2ajCL8Xy90AuB2rKaryR4iF8Q8ksU3Nei_mZj-DE,9918
-autonomous/db/db_sync.py,sha256=VfthiqoCirEccBFlqLrTl8OOUBf3ksUGd8B2gu04d_k,4329
+autonomous/db/db_sync.py,sha256=Njs589pJ2QFuxQkr85O_U1tDCNezqupJ945Ubpgqmbc,4408
 autonomous/db/dereference.py,sha256=EgbpPCXtDZqD_ZuY1Wd4o3ltRy8qEo3C5yRh5_c9fLE,12776
 autonomous/db/document.py,sha256=oZKdTaoqwv9fCHiv450rIxgINASQF3J9FzIsUOUXHhw,44428
 autonomous/db/errors.py,sha256=_QeCotid1kmr7_W0QyH6NUrwwYN9eced_yyyiop0Xlw,4108
@@ -58,7 +55,7 @@ autonomous/taskrunner/__init__.py,sha256=ughX-QfWBas5W3aB2SiF887SWJ3Dzc2X43Yxtmp
 autonomous/taskrunner/autotasks.py,sha256=2zRaqHYqfdlgC_BQm6B6D2svN1ukyWeJJHwweZFHVoo,2616
 autonomous/taskrunner/task_router.py,sha256=W09HtRUuhwlnGxM5w4l6Hzw6mfS6L4ljWiMzD3ZVFeU,601
 autonomous/utils/markdown.py,sha256=tf8vlHARiQO1X_aGbqlYozzP_TbdiDRT9EEP6aFRQo0,2153
-autonomous_app-0.3.33.dist-info/METADATA,sha256=hB6MYZigvoI7PaienrCRpTX9FfVe96BJf3ZRJfDdDkw,3024
-autonomous_app-0.3.33.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
-autonomous_app-0.3.33.dist-info/top_level.txt,sha256=ZyxWWDdbvZekF3UFunxl4BQsVDb_FOW3eTn0vun_jb4,11
-autonomous_app-0.3.33.dist-info/RECORD,,
+autonomous_app-0.3.34.dist-info/METADATA,sha256=Ct1XM2RK9Z2yCNTDGtRKpPxqD1u_Srtjq_JO1YcWDP4,3024
+autonomous_app-0.3.34.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+autonomous_app-0.3.34.dist-info/top_level.txt,sha256=ZyxWWDdbvZekF3UFunxl4BQsVDb_FOW3eTn0vun_jb4,11
+autonomous_app-0.3.34.dist-info/RECORD,,