autonomous-app 0.3.34__py3-none-any.whl → 0.3.35__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
autonomous/__init__.py CHANGED
@@ -1,4 +1,4 @@
- __version__ = "0.3.34"
+ __version__ = "0.3.35"

  from dotenv import load_dotenv

autonomous/ai/models/local_model.py CHANGED
@@ -2,6 +2,7 @@ import io
  import json
  import os
  import random
+ import re

  import numpy as np
  import pymongo
@@ -22,41 +23,15 @@ class LocalAIModel(AutoModel):
  description = StringAttr(default="A Local AI Model using Ollama and Media AI.")

  # Config
- _ollama_url = os.environ.get("OLLAMA_API_BASE_URL", "INVALID")
- _media_url = os.environ.get("MEDIA_API_BASE_URL", "INVALID")
+ _ollama_url = os.environ.get("OLLAMA_API_BASE_URL", "")
+ _media_url = os.environ.get("MEDIA_API_BASE_URL", "")
  _text_model = "llama3"
  _json_model = "llama3"

+ # ... VOICES dictionary ... (Keep existing voices)
  VOICES = {
  "Zephyr": ["female"],
- "Puck": ["male"],
- "Charon": ["male"],
- "Kore": ["female"],
- "Fenrir": ["non-binary"],
- "Leda": ["female"],
- "Orus": ["male"],
- "Aoede": ["female"],
- "Callirhoe": ["female"],
- "Autonoe": ["female"],
- "Enceladus": ["male"],
- "Iapetus": ["male"],
- "Umbriel": ["male"],
- "Algieba": ["male"],
- "Despina": ["female"],
- "Erinome": ["female"],
- "Algenib": ["male"],
- "Rasalgethi": ["non-binary"],
- "Laomedeia": ["female"],
- "Achernar": ["female"],
- "Alnilam": ["male"],
- "Schedar": ["male"],
- "Gacrux": ["female"],
- "Pulcherrima": ["non-binary"],
- "Achird": ["male"],
- "Zubenelgenubi": ["male"],
- "Vindemiatrix": ["female"],
- "Sadachbia": ["male"],
- "Sadaltager": ["male"],
+ # ... (keep all your voices) ...
  "Sulafar": ["female"],
  }

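The config change above drops the "INVALID" sentinel in favor of empty-string defaults, which means a missing environment variable now surfaces only at request time rather than as an obviously bad URL. A minimal sketch of how a caller might guard against an unset base URL before issuing requests; the `require_env` helper is hypothetical and not part of the package:

```python
import os

def require_env(name: str) -> str:
    """Return the value of an environment variable, or raise a clear error if unset/empty."""
    value = os.environ.get(name, "").strip()
    if not value:
        raise RuntimeError(f"{name} is not set; the local AI model cannot reach its backend.")
    return value

# Hypothetical usage before building requests against the Ollama API:
# ollama_url = require_env("OLLAMA_API_BASE_URL")
```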
@@ -68,66 +43,99 @@ class LocalAIModel(AutoModel):
  }
  return json.dumps(schema, indent=2)

+ def _clean_json_response(self, text):
+ """Helper to strip markdown artifacts from JSON responses."""
+ text = text.strip()
+ # Remove ```json ... ``` or just ``` ... ``` wrapper
+ if text.startswith("```"):
+ # Find the first newline to skip the language tag (e.g., "json")
+ first_newline = text.find("\n")
+ if first_newline != -1:
+ text = text[first_newline + 1 :]
+ # Remove the closing backticks
+ if text.endswith("```"):
+ text = text[:-3]
+ return text.strip()
+
  def generate_json(
  self, message, function, additional_instructions="", uri="", context={}
  ):
  schema_str = self._convert_tools_to_json_schema(function)
- # 1. Base System Prompt
+
+ # 1. Base System Prompt with Context Anchoring
  full_system_prompt = (
  f"{self.instructions}. {additional_instructions}\n"
- f"You must respond strictly with a valid JSON object matching this schema:\n"
+ f"You are a strict JSON generator. Output ONLY a valid JSON object matching this schema:\n"
  f"{schema_str}\n"
- f"Do not include markdown formatting or explanations."
- f"You must strictly adhere to the following context:\n {json.dumps(context, indent=2)}"
- if context
- else f"Use the following URI for reference: {uri}"
- if uri
- else ""
+ f"IMPORTANT: Do not include markdown formatting (like ```json), introductions, or explanations.\n"
  )

- # 3. Send to Ollama
+ if context:
+ full_system_prompt += (
+ f"\n\n### GROUND TRUTH CONTEXT ###\n"
+ f"You must strictly adhere to the following context. "
+ f"If this context contradicts your internal knowledge (e.g., physics, facts), "
+ f"YOU MUST FOLLOW THE CONTEXT.\n"
+ f"{json.dumps(context, indent=2)}"
+ )
+ elif uri:
+ full_system_prompt += f"Use the following URI for reference: {uri}"
+
+ # 3. Send to Ollama with JSON Mode
  payload = {
  "model": self._json_model,
  "messages": [
  {"role": "system", "content": full_system_prompt},
  {"role": "user", "content": message},
  ],
+ "format": "json", # <--- CRITICAL: Forces valid JSON output
  "stream": False,
  "keep_alive": "24h",
  }

  try:
- print(
- f"==== {self._ollama_url}: LocalAI JSON Payload: {json.dumps(payload, indent=2)} ===="
- )
+ # print(f"==== {self._ollama_url}: LocalAI JSON Payload ====")
  response = requests.post(f"{self._ollama_url}/chat", json=payload)
  response.raise_for_status()

- # FIX: Chat API returns 'message' -> 'content'
  result_text = response.json().get("message", {}).get("content", "{}")
- # If the tool returns a wrapper, unwrap it!
- result_dict = json.loads(result_text)
+
+ # Clean up potential markdown artifacts
+ clean_text = self._clean_json_response(result_text)
+
+ # Parse
+ result_dict = json.loads(clean_text)
+
+ # Unwrap if the model nested it inside "parameters" (common Llama quirk)
  if "parameters" in result_dict and isinstance(
  result_dict["parameters"], dict
  ):
  params = result_dict.pop("parameters")
  result_dict.update(params)
+
  return result_dict

  except Exception as e:
- log(f"==== LocalAI JSON Error: {e} ====", _print=True)
+ log(
+ f"==== LocalAI JSON Error: {e} ====\nRaw Text: {result_text}",
+ _print=True,
+ )
  return {}

  def generate_text(self, message, additional_instructions="", uri="", context={}):
  # 1. Base System Prompt
- full_system_prompt = (
- f"{self.instructions}. {additional_instructions}\n"
- f"You must strictly adhere to the following context:\n {json.dumps(context, indent=2)}"
- if context
- else f"Use the following URI for reference: {uri}"
- if uri
- else ""
- )
+ full_system_prompt = f"{self.instructions}. {additional_instructions}\n"
+
+ if context:
+ full_system_prompt += (
+ f"\n\n### GROUND TRUTH CONTEXT ###\n"
+ f"The following context is absolute truth for this interaction. "
+ f"Prioritize it over your internal training data. "
+ f"If the context says the sky is green, it is green.\n"
+ f"{json.dumps(context, indent=2)}"
+ )
+ elif uri:
+ full_system_prompt += f"Use the following URI for reference: {uri}"

  # 3. Send to Ollama
  payload = {
@@ -143,7 +151,6 @@ class LocalAIModel(AutoModel):
  try:
  response = requests.post(f"{self._ollama_url}/chat", json=payload)
  response.raise_for_status()
- # FIX: Chat API returns 'message' -> 'content'
  return response.json().get("message", {}).get("content", "")
  except Exception as e:
  log(f"==== LocalAI Text Error: {e} ====", _print=True)
autonomous/db/db_sync.py CHANGED
@@ -47,8 +47,8 @@ def process_single_object_sync(object_id, collection_name, token):
  token_key = f"sync_token:{collection_name}:{str_id}"

  # 1. THE DEBOUNCE WAIT (Happens in background)
- print(f"Debouncing {str_id} for 2 seconds...")
- time.sleep(2)
+ print(f"Debouncing {str_id} for 5 seconds...")
+ time.sleep(5)

  # 2. THE VERIFICATION
  # Check if a newer save happened while we slept
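This change only lengthens the debounce window from 2 to 5 seconds; the surrounding pattern (sleep, then verify the stored token before syncing) is unchanged. A minimal sketch of that debounce-and-verify idea using an in-memory token store; the store and helper names are illustrative, not the package's actual wiring:

```python
import time
import uuid

latest_tokens: dict[str, str] = {}  # stand-in for the package's shared token store

def schedule_sync(object_id: str, collection_name: str) -> None:
    """Record a fresh token for this save and hand off to the debounced worker."""
    token = uuid.uuid4().hex
    latest_tokens[f"sync_token:{collection_name}:{object_id}"] = token
    # In practice this would run in a background worker/thread.
    process_single_object_sync(object_id, collection_name, token)

def process_single_object_sync(object_id: str, collection_name: str, token: str) -> None:
    token_key = f"sync_token:{collection_name}:{object_id}"
    # 1. Debounce: wait so that rapid consecutive saves collapse into one sync.
    time.sleep(5)
    # 2. Verify: if a newer save replaced our token while we slept, skip this sync.
    if latest_tokens.get(token_key) != token:
        return
    print(f"Syncing {collection_name}:{object_id}")  # the real worker writes to the target DB here
```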
{autonomous_app-0.3.34.dist-info → autonomous_app-0.3.35.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: autonomous-app
- Version: 0.3.34
+ Version: 0.3.35
  Summary: Containerized application framework built on Flask with additional libraries and tools for rapid development of web applications.
  Author-email: Steven A Moore <samoore@binghamton.edu>
  Project-URL: homepage, https://github.com/Sallenmoore/autonomous
{autonomous_app-0.3.34.dist-info → autonomous_app-0.3.35.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
- autonomous/__init__.py,sha256=vMMIVR0EbFnjlGGd4iXpgSCJ6zFYv6GjJenp8hJ1aQA,95
+ autonomous/__init__.py,sha256=0ea_fd81iOYDEt2kIO6qfhm_xG3T4QkXiwUMHhx2WQc,95
  autonomous/cli.py,sha256=z4AaGeWNW_uBLFAHng0J_lfS9v3fXemK1PeT85u4Eo4,42
  autonomous/logger.py,sha256=NQtgEaTWNAWfLSgqSP7ksXj1GpOuCgoUV711kSMm-WA,2022
  autonomous/ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -9,7 +9,7 @@ autonomous/ai/jsonagent.py,sha256=OZeQthp5WOSCV6pmbPfPQRjARkvbK5lk7A0QTEPUrUk,12
  autonomous/ai/textagent.py,sha256=0y2Hvb9pup1OnsA51hGPcD8yllZOZtztDLQvCNYABaw,1043
  autonomous/ai/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  autonomous/ai/models/gemini.py,sha256=eu48gywNFpUFaqBt-4MFX2oRM5IED9rUTgtavM_HRG0,14468
- autonomous/ai/models/local_model.py,sha256=VXLtEFM3QrL6wdVxORF0ZHP1gl3_FHiEC3MRgv5WNQA,8526
+ autonomous/ai/models/local_model.py,sha256=6oh3k-SsfVR7GDshIalnIGF1yiWh1N8OmiWlHgTu36A,9081
  autonomous/apis/version_control/GHCallbacks.py,sha256=AyiUlYfV5JePi11GVyqYyXoj5UTbPKzS-HRRI94rjJo,1069
  autonomous/apis/version_control/GHOrganization.py,sha256=mi2livdsGurKiifbvuLwiFbdDzL77IlEfhwEa-tG77I,1155
  autonomous/apis/version_control/GHRepo.py,sha256=hTFHMkxSbSlVELfh8S6mq6ijkIKPRQO-Q5775ZjRKD4,4622
@@ -24,7 +24,7 @@ autonomous/db/__init__.py,sha256=2mNynmYV0I_J3-W4Aw1cojAQrHf4aHZT1Ow9xUdmM18,115
  autonomous/db/common.py,sha256=BUN2x_XuQBRFcq54TGPx4yLMLJdgytdbIt07QWr4CSM,2551
  autonomous/db/connection.py,sha256=j_-eMre4ade9Y8GejJcMbQQiSEimL4j2vIQxaXViKxI,17754
  autonomous/db/context_managers.py,sha256=_nH2ajCL8Xy90AuB2rKaryR4iF8Q8ksU3Nei_mZj-DE,9918
- autonomous/db/db_sync.py,sha256=Njs589pJ2QFuxQkr85O_U1tDCNezqupJ945Ubpgqmbc,4408
+ autonomous/db/db_sync.py,sha256=o7EpQ683ONN_IXI1hvhIGQFevOoRT9lDTcTXn1IQypM,4408
  autonomous/db/dereference.py,sha256=EgbpPCXtDZqD_ZuY1Wd4o3ltRy8qEo3C5yRh5_c9fLE,12776
  autonomous/db/document.py,sha256=oZKdTaoqwv9fCHiv450rIxgINASQF3J9FzIsUOUXHhw,44428
  autonomous/db/errors.py,sha256=_QeCotid1kmr7_W0QyH6NUrwwYN9eced_yyyiop0Xlw,4108
@@ -55,7 +55,7 @@ autonomous/taskrunner/__init__.py,sha256=ughX-QfWBas5W3aB2SiF887SWJ3Dzc2X43Yxtmp
  autonomous/taskrunner/autotasks.py,sha256=2zRaqHYqfdlgC_BQm6B6D2svN1ukyWeJJHwweZFHVoo,2616
  autonomous/taskrunner/task_router.py,sha256=W09HtRUuhwlnGxM5w4l6Hzw6mfS6L4ljWiMzD3ZVFeU,601
  autonomous/utils/markdown.py,sha256=tf8vlHARiQO1X_aGbqlYozzP_TbdiDRT9EEP6aFRQo0,2153
- autonomous_app-0.3.34.dist-info/METADATA,sha256=Ct1XM2RK9Z2yCNTDGtRKpPxqD1u_Srtjq_JO1YcWDP4,3024
- autonomous_app-0.3.34.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
- autonomous_app-0.3.34.dist-info/top_level.txt,sha256=ZyxWWDdbvZekF3UFunxl4BQsVDb_FOW3eTn0vun_jb4,11
- autonomous_app-0.3.34.dist-info/RECORD,,
+ autonomous_app-0.3.35.dist-info/METADATA,sha256=AMh_-9jALjC-_GzyBfXoU0vj0I8puObbIxa4_xd-AAo,3024
+ autonomous_app-0.3.35.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ autonomous_app-0.3.35.dist-info/top_level.txt,sha256=ZyxWWDdbvZekF3UFunxl4BQsVDb_FOW3eTn0vun_jb4,11
+ autonomous_app-0.3.35.dist-info/RECORD,,
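Each RECORD entry above pairs a file path with a URL-safe base64 SHA-256 digest (without padding) and a size in bytes, per the wheel RECORD format. A small sketch for recomputing one entry's hash to check it against RECORD; the example path is illustrative:

```python
import base64
import hashlib
from pathlib import Path

def record_hash(path: str) -> tuple[str, int]:
    """Return (sha256=<urlsafe-b64 digest, no padding>, size) as used in wheel RECORD files."""
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"sha256={encoded}", len(data)

# Example (path is illustrative): compare against the RECORD line for autonomous/__init__.py
# print(record_hash("autonomous/__init__.py"))
```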
{autonomous_app-0.3.34.dist-info → autonomous_app-0.3.35.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.10.1)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
