aiecs 1.3.3__py3-none-any.whl → 1.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aiecs might be problematic. See the registry's advisory page for more details.

aiecs/__init__.py CHANGED
@@ -5,7 +5,7 @@ A powerful Python middleware framework for building AI-powered applications
5
5
  with tool orchestration, task execution, and multi-provider LLM support.
6
6
  """
7
7
 
8
- __version__ = "1.3.3"
8
+ __version__ = "1.3.5"
9
9
  __author__ = "AIECS Team"
10
10
  __email__ = "iretbl@gmail.com"
11
11
 
@@ -23,6 +23,12 @@ class VertexAIClient(BaseLLMClient):
23
23
  super().__init__("Vertex")
24
24
  self.settings = get_settings()
25
25
  self._initialized = False
26
+ # Track part count statistics for monitoring
27
+ self._part_count_stats = {
28
+ "total_responses": 0,
29
+ "part_counts": {}, # {part_count: frequency}
30
+ "last_part_count": None
31
+ }
26
32
 
27
33
  def _init_vertex_ai(self):
28
34
  """Lazy initialization of Vertex AI with proper authentication"""
@@ -141,19 +147,44 @@ class VertexAIClient(BaseLLMClient):
141
147
  text_parts.append(part.text)
142
148
 
143
149
  if text_parts:
150
+ # Log part count for monitoring
151
+ part_count = len(text_parts)
152
+ self.logger.info(f"📊 Vertex AI response: {part_count} parts detected")
153
+
154
+ # Update statistics
155
+ self._part_count_stats["total_responses"] += 1
156
+ self._part_count_stats["part_counts"][part_count] = self._part_count_stats["part_counts"].get(part_count, 0) + 1
157
+ self._part_count_stats["last_part_count"] = part_count
158
+
159
+ # Log statistics if significant variation detected
160
+ if part_count != self._part_count_stats.get("last_part_count", part_count):
161
+ self.logger.warning(f"⚠️ Part count variation detected: {part_count} parts (previous: {self._part_count_stats.get('last_part_count', 'unknown')})")
162
+
144
163
  # Handle multi-part response format
145
164
  if len(text_parts) > 1:
146
- # Multi-part response (typical for Gemini 2.5 with tool calling)
147
- # Part 1: Thinking/reasoning content
148
- # Part 2+: Actual output (JSON or other format)
165
+ # Multi-part response
166
+ # Minimal fix: only fix incomplete <thinking> tags, preserve original order
167
+ # Do NOT reorganize content - let downstream code handle semantics
149
168
 
150
- # Wrap first part (thinking) in <thinking> tags
151
- thinking_part = text_parts[0]
152
- actual_output_parts = text_parts[1:]
169
+ processed_parts = []
170
+ fixed_count = 0
153
171
 
154
- # Format: <thinking>Part 1</thinking>\nPart 2\nPart 3...
155
- content = f"<thinking>\n{thinking_part}\n</thinking>\n" + "\n".join(actual_output_parts)
156
- self.logger.info(f"✅ Successfully wrapped multi-part response: {len(text_parts)} parts (thinking + output)")
172
+ for i, part in enumerate(text_parts):
173
+ if '<thinking>' in part and '</thinking>' not in part:
174
+ # Incomplete thinking tag: add closing tag
175
+ part = part + '\n</thinking>'
176
+ fixed_count += 1
177
+ self.logger.debug(f" Part {i+1}: Incomplete <thinking> tag fixed")
178
+
179
+ processed_parts.append(part)
180
+
181
+ # Merge in original order
182
+ content = "\n".join(processed_parts)
183
+
184
+ if fixed_count > 0:
185
+ self.logger.info(f"✅ Multi-part response merged: {len(text_parts)} parts, {fixed_count} incomplete tags fixed, order preserved")
186
+ else:
187
+ self.logger.info(f"✅ Multi-part response merged: {len(text_parts)} parts, order preserved")
157
188
  else:
158
189
  # Single part response - use as is
159
190
  content = text_parts[0]
@@ -238,7 +269,64 @@ class VertexAIClient(BaseLLMClient):
238
269
  yield word + " "
239
270
  await asyncio.sleep(0.05) # Small delay to simulate streaming
240
271
 
272
+ def get_part_count_stats(self) -> Dict[str, Any]:
273
+ """
274
+ Get statistics about part count variations in Vertex AI responses.
275
+
276
+ Returns:
277
+ Dictionary containing part count statistics and analysis
278
+ """
279
+ stats = self._part_count_stats.copy()
280
+
281
+ if stats["total_responses"] > 0:
282
+ # Calculate variation metrics
283
+ part_counts = list(stats["part_counts"].keys())
284
+ stats["variation_analysis"] = {
285
+ "unique_part_counts": len(part_counts),
286
+ "most_common_count": max(stats["part_counts"].items(), key=lambda x: x[1])[0] if stats["part_counts"] else None,
287
+ "part_count_range": f"{min(part_counts)}-{max(part_counts)}" if part_counts else "N/A",
288
+ "stability_score": 1.0 - (len(part_counts) - 1) / max(stats["total_responses"], 1) # 0-1, higher is more stable
289
+ }
290
+
291
+ # Generate recommendations
292
+ if stats["variation_analysis"]["stability_score"] < 0.7:
293
+ stats["recommendations"] = [
294
+ "High part count variation detected",
295
+ "Consider optimizing prompt structure",
296
+ "Monitor input complexity patterns",
297
+ "Review tool calling configuration"
298
+ ]
299
+ else:
300
+ stats["recommendations"] = [
301
+ "Part count variation is within acceptable range",
302
+ "Continue monitoring for patterns"
303
+ ]
304
+
305
+ return stats
306
+
307
+ def log_part_count_summary(self):
308
+ """Log a summary of part count statistics"""
309
+ stats = self.get_part_count_stats()
310
+
311
+ if stats["total_responses"] > 0:
312
+ self.logger.info("📈 Vertex AI Part Count Summary:")
313
+ self.logger.info(f" Total responses: {stats['total_responses']}")
314
+ self.logger.info(f" Part count distribution: {stats['part_counts']}")
315
+
316
+ if "variation_analysis" in stats:
317
+ analysis = stats["variation_analysis"]
318
+ self.logger.info(f" Stability score: {analysis['stability_score']:.2f}")
319
+ self.logger.info(f" Most common count: {analysis['most_common_count']}")
320
+ self.logger.info(f" Count range: {analysis['part_count_range']}")
321
+
322
+ if "recommendations" in stats:
323
+ self.logger.info(" Recommendations:")
324
+ for rec in stats["recommendations"]:
325
+ self.logger.info(f" • {rec}")
326
+
241
327
  async def close(self):
242
328
  """Clean up resources"""
329
+ # Log final statistics before cleanup
330
+ self.log_part_count_summary()
243
331
  # Vertex AI doesn't require explicit cleanup
244
332
  self._initialized = False
aiecs/main.py CHANGED
@@ -142,7 +142,7 @@ async def lifespan(app: FastAPI):
142
142
  app = FastAPI(
143
143
  title="AIECS - AI Execute Services",
144
144
  description="Middleware service for AI-powered task execution and tool orchestration",
145
- version="1.3.3",
145
+ version="1.3.5",
146
146
  lifespan=lifespan
147
147
  )
148
148
 
@@ -167,7 +167,7 @@ async def health_check():
167
167
  return {
168
168
  "status": "healthy",
169
169
  "service": "aiecs",
170
- "version": "1.3.3"
170
+ "version": "1.3.5"
171
171
  }
172
172
 
173
173
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: aiecs
3
- Version: 1.3.3
3
+ Version: 1.3.5
4
4
  Summary: AI Execute Services - A middleware framework for AI-powered task execution and tool orchestration
5
5
  Author-email: AIECS Team <iretbl@gmail.com>
6
6
  License-Expression: MIT
@@ -1,7 +1,7 @@
1
- aiecs/__init__.py,sha256=_1XRwx-1uwE4PsagfdYcA2kuDIrAOPxmK6OBVtKf_qA,1859
1
+ aiecs/__init__.py,sha256=0C3__5CeIMR8OU-fqcmKw2ETo4SIjYuFNk_MZNI_wfI,1859
2
2
  aiecs/__main__.py,sha256=AfQpzy3SgwWuP4DuymYcm4MISMuzqwhxxGSYo53PBvY,1035
3
3
  aiecs/aiecs_client.py,sha256=gIqecRBBH_bYIWhqiHCemdVgmGb9Jqdxf1b6RoqXWlQ,17276
4
- aiecs/main.py,sha256=QF_ln8faRFzoT-kTAC9ZbNVskodN9nkr22ZR_wQZfq8,10837
4
+ aiecs/main.py,sha256=jYY2nqk00XGoEtyQdLNkmpt-4RF52QeQyZUNU0305Ig,10837
5
5
  aiecs/application/__init__.py,sha256=NkmrUH1DqxJ3vaVC8QwscNdlWqHfC7ZagL4k3nZ_qz4,192
6
6
  aiecs/application/executors/__init__.py,sha256=WIl7L9HBsEhNfbNtJdvBvFUJXzESvNZVaiAA6tdtJcs,191
7
7
  aiecs/application/executors/operation_executor.py,sha256=-7mFo1hUnWdehVPg0fnSiRhW3LACpIiyLSH-iu7bX4U,13818
@@ -58,7 +58,7 @@ aiecs/llm/clients/__init__.py,sha256=uQM004TQappwJMqTxVZNscpVPQtirkvYUPea3QYB7d0
58
58
  aiecs/llm/clients/base_client.py,sha256=j4NY-oEdG5ALBCSddblPpjttISn5teqLVVUuZyYn7g4,5880
59
59
  aiecs/llm/clients/googleai_client.py,sha256=sTgdw4eicxWruNGOMSsuEHbfF9RuDQo8SClqEtu1JOQ,6591
60
60
  aiecs/llm/clients/openai_client.py,sha256=x7Y_yTVu0kp-gu5Z-M0Bx-O20D0YDgZoJQxzkjNpr6c,4202
61
- aiecs/llm/clients/vertex_client.py,sha256=jyyNMeRTJX-QtM95qjor-qA2iEOBO4s7940uv910_J4,12386
61
+ aiecs/llm/clients/vertex_client.py,sha256=Vd5IJIvPVQuv0l6bMQmkY2aemrkfLoHY2RM5Z5SUhVg,17139
62
62
  aiecs/llm/clients/xai_client.py,sha256=XEELb9_qFeeQaansDWvAJRJVpt8CaBRLcYskuv9uDq0,6386
63
63
  aiecs/llm/config/__init__.py,sha256=KZbwHoBlbcN7HgNueA5p-0GpyVMJRNG1V5T-tkri8G4,1115
64
64
  aiecs/llm/config/config_loader.py,sha256=PTMsZax3CoTrMo6BZlUoI7takII3_DHm3w5xTKgBJpA,8921
@@ -168,9 +168,9 @@ aiecs/utils/prompt_loader.py,sha256=cBS2bZXpYQOWSiOGkhwIzyy3_bETqwIblRi_9qQT9iQ,
168
168
  aiecs/utils/token_usage_repository.py,sha256=1xjenLYwC0YT6lKZFEGO4scRCXLuWdec2MWjzih5SZY,10210
169
169
  aiecs/ws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
170
170
  aiecs/ws/socket_server.py,sha256=j_9idVY_rWlTsF51FgmuhWCWFVt7_gAHL8vNg3IxV5g,1476
171
- aiecs-1.3.3.dist-info/licenses/LICENSE,sha256=_1YRaIS0eZu1pv6xfz245UkU0i1Va2B841hv3OWRwqg,12494
172
- aiecs-1.3.3.dist-info/METADATA,sha256=5AuGrBEyu3n9K-4iQQUQTfdKsuuBucisZ8ip4JOWYQ4,16635
173
- aiecs-1.3.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
174
- aiecs-1.3.3.dist-info/entry_points.txt,sha256=TfLBuwLOfgQqKvnoF1sgTS19-Hgl0aWvCZjIdblIiig,667
175
- aiecs-1.3.3.dist-info/top_level.txt,sha256=22IlUlOqh9Ni3jXlQNMNUqzbW8dcxXPeR_EQ-BJVcV8,6
176
- aiecs-1.3.3.dist-info/RECORD,,
171
+ aiecs-1.3.5.dist-info/licenses/LICENSE,sha256=_1YRaIS0eZu1pv6xfz245UkU0i1Va2B841hv3OWRwqg,12494
172
+ aiecs-1.3.5.dist-info/METADATA,sha256=34_y02-Q5OdrWlquol8HuRW0QOgP6WRrSHM9uCaYuQM,16635
173
+ aiecs-1.3.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
174
+ aiecs-1.3.5.dist-info/entry_points.txt,sha256=TfLBuwLOfgQqKvnoF1sgTS19-Hgl0aWvCZjIdblIiig,667
175
+ aiecs-1.3.5.dist-info/top_level.txt,sha256=22IlUlOqh9Ni3jXlQNMNUqzbW8dcxXPeR_EQ-BJVcV8,6
176
+ aiecs-1.3.5.dist-info/RECORD,,
File without changes