aiqtoolkit 1.2.0rc2__py3-none-any.whl → 1.2.0rc4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aiqtoolkit might be problematic. Click here for more details.

Files changed (37) hide show
  1. aiq/agent/base.py +8 -7
  2. aiq/agent/react_agent/agent.py +2 -3
  3. aiq/agent/react_agent/register.py +1 -1
  4. aiq/agent/reasoning_agent/reasoning_agent.py +2 -1
  5. aiq/agent/tool_calling_agent/register.py +2 -1
  6. aiq/authentication/api_key/api_key_auth_provider.py +6 -2
  7. aiq/builder/function.py +21 -6
  8. aiq/builder/function_base.py +6 -2
  9. aiq/cli/commands/sizing/calc.py +6 -3
  10. aiq/cli/commands/start.py +0 -5
  11. aiq/cli/commands/uninstall.py +2 -4
  12. aiq/data_models/api_server.py +6 -12
  13. aiq/data_models/component_ref.py +1 -1
  14. aiq/data_models/discovery_metadata.py +62 -13
  15. aiq/front_ends/console/console_front_end_plugin.py +2 -22
  16. aiq/front_ends/simple_base/simple_front_end_plugin_base.py +4 -2
  17. aiq/object_store/in_memory_object_store.py +18 -16
  18. aiq/observability/exporter/processing_exporter.py +99 -46
  19. aiq/observability/exporter/span_exporter.py +1 -0
  20. aiq/observability/processor/batching_processor.py +52 -59
  21. aiq/observability/processor/callback_processor.py +42 -0
  22. aiq/observability/processor/processor.py +4 -1
  23. aiq/profiler/calc/calc_runner.py +5 -1
  24. aiq/profiler/calc/data_models.py +18 -6
  25. aiq/registry_handlers/package_utils.py +397 -28
  26. aiq/runtime/loader.py +23 -2
  27. aiq/tool/code_execution/README.md +0 -1
  28. aiq/tool/server_tools.py +1 -1
  29. aiq/utils/dump_distro_mapping.py +32 -0
  30. aiq/utils/type_converter.py +52 -10
  31. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/METADATA +1 -1
  32. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/RECORD +37 -35
  33. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/WHEEL +0 -0
  34. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/entry_points.txt +0 -0
  35. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
  36. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/licenses/LICENSE.md +0 -0
  37. {aiqtoolkit-1.2.0rc2.dist-info → aiqtoolkit-1.2.0rc4.dist-info}/top_level.txt +0 -0
@@ -20,6 +20,8 @@ import os
20
20
  import subprocess
21
21
  from functools import lru_cache
22
22
 
23
+ from packaging.requirements import Requirement
24
+
23
25
  from aiq.data_models.component import AIQComponentEnum
24
26
  from aiq.data_models.discovery_metadata import DiscoveryMetadata
25
27
  from aiq.registry_handlers.schemas.package import WheelData
@@ -57,6 +59,372 @@ def get_module_name_from_distribution(distro_name: str) -> str | None:
57
59
  return None
58
60
 
59
61
 
62
def parse_requirement(requirement: str) -> str:
    """Extract the base package name from a requirement string.

    Only the (lower-cased) package name is returned; extras, version
    specifiers, environment markers, and inline comments are ignored.

    Args:
        requirement (str): A requirement string like 'numpy>=1.20.0' or
            'requests[security]~=2.28.0'

    Returns:
        str: The base package name (e.g., 'numpy' from 'numpy>=1.20.0',
            'requests' from 'requests[security]~=2.28.0'), or "" when the
            string is empty or cannot be parsed.
    """
    # A '#' may introduce an inline comment, but it may also be part of a
    # PEP 508 direct-reference URL fragment (e.g. 'pkg @ https://...#sha256=...').
    # Try the raw string first and only fall back to the comment-stripped form,
    # so valid direct references are never truncated.
    candidates = [requirement.strip()]
    stripped = requirement.split('#')[0].strip()
    if stripped != candidates[0]:
        candidates.append(stripped)

    last_error: Exception | None = None
    for candidate in candidates:
        if not candidate:
            continue
        try:
            return Requirement(candidate).name.lower()
        except Exception as e:
            last_error = e

    # Silently ignore blank / comment-only lines; warn only when a real
    # candidate failed to parse (matches the previous behavior).
    if stripped:
        logger.warning("Failed to parse requirement '%s': %s. Skipping this dependency.", requirement, last_error)
    return ""
86
+
87
+
88
def resolve_extras_to_packages(package_name: str, extras: list[str]) -> set[str]:
    """Resolve package extras to their actual package dependencies.

    Args:
        package_name (str): The base package name (e.g., 'aiqtoolkit')
        extras (list[str]): List of extra names (e.g., ['langchain', 'telemetry'])

    Returns:
        set[str]: Set of additional package names that the extras resolve to
            (e.g., {'aiqtoolkit-langchain', 'aiqtoolkit-opentelemetry', 'aiqtoolkit-phoenix',
            'aiqtoolkit-weave', 'aiqtoolkit-ragaai'})
    """
    resolved_packages: set[str] = set()

    try:
        # Get the distribution metadata for the package
        dist = importlib.metadata.distribution(package_name)

        # Parse all requirements to find optional dependencies
        requires = dist.requires or []

        for requirement_str in requires:
            try:
                req = Requirement(requirement_str)

                # Only requirements gated behind an ``extra == "..."`` marker are
                # optional dependencies. A marker that does not mention ``extra``
                # (e.g. a plain python_version constraint) evaluates against the
                # *current* interpreter and would otherwise be wrongly reported
                # as an extras-provided package.
                if req.marker is None or 'extra' not in str(req.marker):
                    continue

                for extra in extras:
                    # Try marker evaluation first; packaging merges the supplied
                    # 'extra' value with the default environment.
                    try:
                        if req.marker.evaluate({'extra': extra}):
                            resolved_packages.add(req.name.lower())
                            break
                    except Exception:
                        # Fallback to simple string check
                        marker_str = str(req.marker)
                        if f'extra == "{extra}"' in marker_str or f"extra == '{extra}'" in marker_str:
                            resolved_packages.add(req.name.lower())
                            break

            except Exception as e:
                logger.warning("Failed to parse requirement '%s' for extras resolution: %s", requirement_str, e)

    except importlib.metadata.PackageNotFoundError:
        logger.warning("Package '%s' not found for extras resolution", package_name)
    except Exception as e:
        logger.warning("Failed to resolve extras for package '%s': %s", package_name, e)

    return resolved_packages
137
+
138
+
139
+ def extract_dependencies_with_extras_resolved(pyproject_path: str) -> set[str]:
140
+ """Extract dependency names from pyproject.toml with extras properly resolved.
141
+
142
+ This function not only extracts the base package names but also resolves
143
+ any extras (e.g., package[extra1,extra2]) to their actual package dependencies.
144
+
145
+ Args:
146
+ pyproject_path (str): Path to the pyproject.toml file
147
+
148
+ Returns:
149
+ set[str]: Set of all dependency names including those resolved from extras
150
+
151
+ Example:
152
+ For a dependency like "aiqtoolkit[langchain,telemetry]~=1.2", this will return:
153
+ {'aiqtoolkit', 'aiqtoolkit-langchain', 'aiqtoolkit-opentelemetry', 'aiqtoolkit-phoenix', ...}
154
+
155
+ Raises:
156
+ FileNotFoundError: If the pyproject.toml file doesn't exist
157
+ ValueError: If the file cannot be parsed
158
+ """
159
+ import tomllib
160
+
161
+ if not os.path.exists(pyproject_path):
162
+ raise FileNotFoundError(f"pyproject.toml not found at {pyproject_path}")
163
+
164
+ try:
165
+ with open(pyproject_path, "rb") as f:
166
+ data = tomllib.load(f)
167
+ except Exception as e:
168
+ raise ValueError(f"Failed to parse pyproject.toml: {e}") from e
169
+
170
+ project_data = data.get("project", {})
171
+ all_dependencies = set()
172
+
173
+ def _process_dependency(dep_spec: str):
174
+ """Process a single dependency specification and resolve extras."""
175
+ # Handle inline comments
176
+ clean_req = dep_spec.split('#')[0].strip()
177
+ if not clean_req:
178
+ return
179
+
180
+ try:
181
+ parsed = Requirement(clean_req)
182
+ base_name = parsed.name.lower()
183
+ all_dependencies.add(base_name)
184
+
185
+ # If there are extras, try to resolve them
186
+ if parsed.extras:
187
+ resolved_extras = resolve_extras_to_packages(base_name, list(parsed.extras))
188
+ all_dependencies.update(resolved_extras)
189
+
190
+ except Exception as e:
191
+ logger.warning("Failed to process dependency '%s': %s", dep_spec, e)
192
+
193
+ # Process main dependencies
194
+ for dep_spec in project_data.get("dependencies", []):
195
+ _process_dependency(dep_spec)
196
+
197
+ # Process optional dependencies
198
+ optional_deps = project_data.get("optional-dependencies", {})
199
+ for _group_name, group_deps in optional_deps.items():
200
+ for dep_spec in group_deps:
201
+ _process_dependency(dep_spec)
202
+
203
+ return all_dependencies
204
+
205
+
206
@lru_cache
def get_distributions() -> list[importlib.metadata.Distribution]:
    """Return every installed distribution, materialized once and cached.

    Enumerating installed distributions is expensive, hence the ``lru_cache``.
    """
    return [*importlib.metadata.distributions()]
210
+
211
+
212
+ def find_distribution_name(name: str) -> str | None:
213
+ """Try to find the correct distribution name for a given package name.
214
+
215
+ Uses dynamic discovery through importlib.metadata to find distributions
216
+ that provide the requested module/package name.
217
+
218
+ Args:
219
+ name (str): Package name to search for.
220
+
221
+ Returns:
222
+ str | None: The correct distribution name if found, None otherwise.
223
+ """
224
+ # First try the name as-is
225
+ try:
226
+ importlib.metadata.distribution(name)
227
+ return name
228
+ except importlib.metadata.PackageNotFoundError:
229
+ pass
230
+
231
+ # Try common case variations
232
+ variations = [
233
+ name.lower(),
234
+ name.upper(),
235
+ name.replace('-', '_'),
236
+ name.replace('_', '-'),
237
+ ]
238
+
239
+ # Try each variation
240
+ for variation in variations:
241
+ if variation != name: # Skip the original name we already tried
242
+ try:
243
+ importlib.metadata.distribution(variation)
244
+ return variation
245
+ except importlib.metadata.PackageNotFoundError:
246
+ continue
247
+
248
+ # Search through all installed distributions to find one that provides this module
249
+ try:
250
+ for dist in get_distributions():
251
+ dist_name = dist.metadata['Name']
252
+
253
+ # Check top-level packages provided by this distribution
254
+ try:
255
+ # Try to get top-level packages from metadata
256
+ top_level_txt = dist.read_text('top_level.txt')
257
+ if top_level_txt:
258
+ top_level_packages = set(top_level_txt.strip().split('\n'))
259
+ if name in top_level_packages:
260
+ return dist_name
261
+ except (FileNotFoundError, AttributeError):
262
+ # top_level.txt doesn't exist, try alternative method
263
+ pass
264
+
265
+ # Fallback: check file paths for top-level modules
266
+ try:
267
+ if hasattr(dist, 'files') and dist.files:
268
+ top_level_from_files = {
269
+ f.parts[0]
270
+ for f in dist.files if len(f.parts) > 0 and not f.parts[0].endswith('.dist-info')
271
+ }
272
+ if name in top_level_from_files:
273
+ return dist_name
274
+ except Exception:
275
+ # Some distributions might not have files info or it might be inaccessible
276
+ continue
277
+
278
+ except Exception as e:
279
+ logger.debug("Error searching distributions for %s: %s", name, e)
280
+
281
+ return None
282
+
283
+
284
def get_transitive_dependencies(distribution_names: list[str]) -> dict[str, set[str]]:
    """Get transitive dependencies from a list of Python distribution names.

    This function recursively resolves all dependencies for the given distribution names,
    returning a mapping of each package to its complete set of transitive dependencies.
    This is useful when publishing plugins to remote registries with nested dependencies,
    ensuring that all dependencies are included in the AIQArtifact's metadata.

    Args:
        distribution_names (list[str]): List of Python distribution names (package names) to analyze.

    Returns:
        dict[str, set[str]]: Dictionary mapping each distribution name to its set of transitive dependencies.
            The dependencies include both direct and indirect dependencies.
    """
    result: dict[str, set[str]] = {}
    processing: set[str] = set()  # Track packages currently being processed (cycle detection)
    completed: set[str] = set()  # Track packages that have been fully processed

    def _get_dependencies_recursive(dist_name: str, path: set[str]) -> set[str]:
        """Recursively get all dependencies for a distribution.

        Args:
            dist_name: The distribution name to process
            path: Set of packages in the current dependency path (for cycle detection)

        Returns:
            The full (direct + transitive) dependency set for ``dist_name``.
        """
        # If we've already computed this package's dependencies, return them
        if dist_name in completed:
            return result.get(dist_name, set())

        # If we encounter this package in the current path, we have a cycle
        # NOTE(review): ``path`` is a set, so the join below logs the chain in
        # arbitrary order — informational only.
        if dist_name in path:
            logger.debug("Cycle detected in dependency chain: %s", " -> ".join(list(path) + [dist_name]))
            return set()

        # If we're currently processing this package in another branch, return empty
        # to avoid duplicate work (we'll get the full result when that branch completes)
        if dist_name in processing:
            return set()

        processing.add(dist_name)
        new_path = path | {dist_name}
        dependencies = set()

        try:
            dist = importlib.metadata.distribution(dist_name)
            requires = dist.requires or []

            for requirement in requires:
                # Skip requirements with extra markers (optional dependencies)
                # These should only be included if the extra is explicitly requested
                if 'extra ==' in requirement:
                    continue

                # Parse the requirement to get the package name
                dep_name = parse_requirement(requirement)

                # Skip self-references and empty names
                if not dep_name or dep_name == dist_name.lower():
                    continue

                dependencies.add(dep_name)

                # Recursively get dependencies of this dependency
                try:
                    transitive_deps = _get_dependencies_recursive(dep_name, new_path)
                    dependencies.update(transitive_deps)
                except importlib.metadata.PackageNotFoundError:
                    # Check if this is likely a conditional dependency (has markers)
                    is_conditional = any(marker in requirement for marker in [
                        'python_version', 'sys_platform', 'platform_system', 'platform_machine', 'implementation_name',
                        'implementation_version'
                    ])

                    if is_conditional:
                        # This is expected - conditional dependencies aren't always installed
                        logger.debug("Conditional dependency %s of %s is not installed: %s",
                                     dep_name,
                                     dist_name,
                                     requirement)
                    else:
                        # This might be a real issue - a non-conditional dependency is missing
                        logger.warning("Dependency %s of %s is not installed", dep_name, dist_name)
                    continue

        except importlib.metadata.PackageNotFoundError:
            # Transitive dependencies that aren't found are usually conditional (platform/version specific)
            # and this is expected behavior
            logger.debug("Distribution %s not found (likely conditional dependency)", dist_name)
            # Don't raise - just return empty dependencies for missing distributions
        finally:
            # Always clear the in-flight marker, even on unexpected errors,
            # so a failure doesn't poison later lookups of the same name.
            processing.remove(dist_name)

        result[dist_name] = dependencies
        completed.add(dist_name)
        return dependencies

    # Process each distribution name
    for dist_name in distribution_names:
        if dist_name not in completed:
            try:
                _get_dependencies_recursive(dist_name.lower(), set())
            except importlib.metadata.PackageNotFoundError:
                # NOTE(review): _get_dependencies_recursive catches
                # PackageNotFoundError internally, so this fallback path appears
                # unreachable for missing distributions — verify intent.
                # Try to find the correct distribution name
                correct_name = find_distribution_name(dist_name)
                if correct_name:
                    logger.debug("Found distribution '%s' for requested name '%s'", correct_name, dist_name)
                    try:
                        _get_dependencies_recursive(correct_name.lower(), set())
                        # Map the original name to the results of the correct name
                        if correct_name.lower() in result:
                            result[dist_name] = result[correct_name.lower()]
                        continue
                    except importlib.metadata.PackageNotFoundError:
                        pass

                logger.error("Distribution %s not found (tried common variations)", dist_name)
                result[dist_name] = set()

    return result
404
+
405
+
406
def get_all_transitive_dependencies(distribution_names: list[str]) -> set[str]:
    """Get all unique transitive dependencies from a list of Python distribution names.

    Flattens the per-distribution mapping produced by
    :func:`get_transitive_dependencies` into one set of unique names. This is
    useful when publishing plugins to remote registries with nested
    dependencies, ensuring that all dependencies are included in the
    AIQArtifact's metadata.

    Args:
        distribution_names: List of Python distribution names (package names) to analyze

    Returns:
        set[str]: Set of all unique transitive dependency names
    """
    deps_map = get_transitive_dependencies(distribution_names)
    # union(*[]) on an empty mapping still yields an empty set.
    return set().union(*deps_map.values())
426
+
427
+
60
428
  def build_wheel(package_root: str) -> WheelData:
61
429
  """Builds a Python .whl for the specified package and saves to disk, sets self._whl_path, and returned as bytes.
62
430
 
@@ -67,8 +435,6 @@ def build_wheel(package_root: str) -> WheelData:
67
435
  WheelData: Data model containing a built python wheel and its corresponding metadata.
68
436
  """
69
437
 
70
- import importlib.util
71
- import re
72
438
  import tomllib
73
439
 
74
440
  from pkginfo import Wheel
@@ -83,22 +449,27 @@ def build_wheel(package_root: str) -> WheelData:
83
449
 
84
450
  toml_project: dict = data.get("project", {})
85
451
  toml_project_name = toml_project.get("name", None)
86
-
87
- assert toml_project_name is not None, f"Package name '{toml_project_name}' not found in pyproject.toml"
88
- # replace "aiqtoolkit" substring with "aiq" to get the import name
89
- module_name = get_module_name_from_distribution(toml_project_name)
90
- assert module_name is not None, f"No modules found for package name '{toml_project_name}'"
91
-
92
- assert importlib.util.find_spec(module_name) is not None, (f"Package {module_name} not "
93
- "installed, cannot discover components.")
94
-
95
452
  toml_packages = set(i for i in data.get("project", {}).get("entry-points", {}).get("aiq.plugins", {}))
96
- toml_dependencies = set(
97
- re.search(r"[a-zA-Z][a-zA-Z\d_-]*", package_name).group(0)
98
- for package_name in toml_project.get("dependencies", []))
99
453
 
454
+ # Extract dependencies using the robust requirement parser with extras resolution
455
+ try:
456
+ toml_dependencies = extract_dependencies_with_extras_resolved(pyproject_toml_path)
457
+ logger.debug("Extracted dependencies with extras resolved: %s", toml_dependencies)
458
+ except Exception as e:
459
+ logger.warning("Failed to extract dependencies with extras resolution, falling back to basic extraction: %s", e)
460
+ # Fallback to basic extraction
461
+ toml_dependencies = set()
462
+ for dep_spec in toml_project.get("dependencies", []):
463
+ try:
464
+ dep_name = parse_requirement(dep_spec)
465
+ if dep_name:
466
+ toml_dependencies.add(dep_name)
467
+ except Exception as e:
468
+ logger.warning("Failed to parse dependency '%s': %s", dep_spec, e)
469
+
470
+ toml_dependencies_transitive = get_all_transitive_dependencies(list(toml_dependencies))
100
471
  union_dependencies = toml_dependencies.union(toml_packages)
101
- union_dependencies.add(toml_project_name)
472
+ union_dependencies.update(toml_dependencies_transitive)
102
473
 
103
474
  working_dir = os.getcwd()
104
475
  os.chdir(package_root)
@@ -119,16 +490,15 @@ def build_wheel(package_root: str) -> WheelData:
119
490
 
120
491
  whl_version = Wheel(whl_path).version
121
492
 
122
- return WheelData(
123
- package_root=package_root,
124
- package_name=module_name, # should it be module name or distro name here
125
- toml_project=toml_project,
126
- toml_dependencies=toml_dependencies,
127
- toml_aiq_packages=toml_packages,
128
- union_dependencies=union_dependencies,
129
- whl_path=whl_path,
130
- whl_base64=whl_base64,
131
- whl_version=whl_version)
493
+ return WheelData(package_root=package_root,
494
+ package_name=toml_project_name,
495
+ toml_project=toml_project,
496
+ toml_dependencies=toml_dependencies,
497
+ toml_aiq_packages=toml_packages,
498
+ union_dependencies=union_dependencies,
499
+ whl_path=whl_path,
500
+ whl_base64=whl_base64,
501
+ whl_version=whl_version)
132
502
 
133
503
 
134
504
  def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnum, list[dict | DiscoveryMetadata]]:
@@ -155,7 +525,7 @@ def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnu
155
525
  if (wheel_data is not None):
156
526
  registry.register_package(package_name=wheel_data.package_name, package_version=wheel_data.whl_version)
157
527
  for entry_point in aiq_plugins:
158
- package_name = entry_point.module.split('.')[0]
528
+ package_name = entry_point.dist.name
159
529
  if (package_name == wheel_data.package_name):
160
530
  continue
161
531
  if (package_name in wheel_data.union_dependencies):
@@ -163,8 +533,7 @@ def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnu
163
533
 
164
534
  else:
165
535
  for entry_point in aiq_plugins:
166
- package_name = entry_point.module.split('.')[0]
167
- registry.register_package(package_name=package_name)
536
+ registry.register_package(package_name=entry_point.dist.name)
168
537
 
169
538
  discovery_metadata = {}
170
539
  for component_type in AIQComponentEnum:
aiq/runtime/loader.py CHANGED
@@ -21,6 +21,7 @@ import time
21
21
  from contextlib import asynccontextmanager
22
22
  from enum import IntFlag
23
23
  from enum import auto
24
+ from functools import lru_cache
24
25
  from functools import reduce
25
26
 
26
27
  from aiq.builder.workflow_builder import WorkflowBuilder
@@ -116,6 +117,7 @@ async def load_workflow(config_file: StrPath, max_concurrency: int = -1):
116
117
  yield AIQSessionManager(workflow.build(), max_concurrency=max_concurrency)
117
118
 
118
119
 
120
+ @lru_cache
119
121
  def discover_entrypoints(plugin_type: PluginTypes):
120
122
  """
121
123
  Discover all the requested plugin types which were registered via an entry point group and return them.
@@ -143,6 +145,25 @@ def discover_entrypoints(plugin_type: PluginTypes):
143
145
  return aiq_plugins
144
146
 
145
147
 
148
@lru_cache
def get_all_aiq_entrypoints_distro_mapping() -> dict[str, str]:
    """
    Get the mapping of all AIQ entry points to their distribution names.

    Every dotted prefix of each entry point's module path (e.g. ``a``,
    ``a.b``, ``a.b.c``) maps to the distribution providing it. Cached because
    entry-point discovery is slow.
    """

    mapping: dict[str, str] = {}
    for ep in discover_entrypoints(PluginTypes.ALL):
        prefix = ""
        for part in ep.module.split("."):
            prefix = f"{prefix}.{part}" if prefix else part
            mapping[prefix] = ep.dist.name
    return mapping
+ return mapping
165
+
166
+
146
167
  def discover_and_register_plugins(plugin_type: PluginTypes):
147
168
  """
148
169
  Discover all the requested plugin types which were registered via an entry point group and register them into the
@@ -175,8 +196,8 @@ def discover_and_register_plugins(plugin_type: PluginTypes):
175
196
  # Log a warning if the plugin took a long time to load. This can be useful for debugging slow imports.
176
197
  # The threshold is 300 ms if no plugins have been loaded yet, and 100 ms otherwise. Triple the threshold
177
198
  # if a debugger is attached.
178
- if (elapsed_time > (300.0 if count == 0 else 100.0) * (3 if is_debugger_attached() else 1)):
179
- logger.warning(
199
+ if (elapsed_time > (300.0 if count == 0 else 150.0) * (3 if is_debugger_attached() else 1)):
200
+ logger.debug(
180
201
  "Loading module '%s' from entry point '%s' took a long time (%f ms). "
181
202
  "Ensure all imports are inside your registered functions.",
182
203
  entry_point.module,
@@ -23,7 +23,6 @@ A secure, containerized Python code execution environment that allows safe execu
23
23
 
24
24
  The Code Execution Sandbox provides:
25
25
  - **Secure code execution** in isolated Docker containers
26
- - **Comprehensive error handling** with detailed stdout/stderr capture
27
26
  - **Multiple input formats** including raw code, dictionary format, and markdown
28
27
  - **Dependency management** with pre-installed libraries
29
28
  - **Flexible configuration** with customizable timeouts and output limits
aiq/tool/server_tools.py CHANGED
@@ -63,4 +63,4 @@ async def current_request_attributes(config: RequestAttributesTool, builder: Bui
63
63
  f"Conversation Id: {conversation_id}")
64
64
 
65
65
  yield FunctionInfo.from_fn(_get_request_attributes,
66
- description="Returns the acquired user defined request attriubutes.")
66
+ description="Returns the acquired user defined request attributes.")
@@ -0,0 +1,32 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import argparse
17
+ import json
18
+
19
+ from aiq.runtime.loader import get_all_aiq_entrypoints_distro_mapping
20
+
21
+
22
def dump_distro_mapping(path: str):
    """Write the AIQ entry-point → distribution mapping to ``path`` as JSON."""
    distro_mapping = get_all_aiq_entrypoints_distro_mapping()
    with open(path, "w", encoding="utf-8") as out_file:
        json.dump(distro_mapping, out_file, indent=4)


if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--path", type=str, required=True)
    cli_args = arg_parser.parse_args()
    dump_distro_mapping(cli_args.path)