aiqtoolkit 1.2.0rc3__py3-none-any.whl → 1.2.0rc4__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in those public registries.

Potentially problematic release.


This version of aiqtoolkit might be problematic.

@@ -28,9 +28,13 @@ logger = logging.getLogger(__name__)
28
28
 
29
29
  class APIKeyAuthProvider(AuthProviderBase[APIKeyAuthProviderConfig]):
30
30
 
31
- def __init__(self, config: APIKeyAuthProviderConfig, config_name: str | None = None) -> None:
32
- assert isinstance(config, APIKeyAuthProviderConfig), ("Config is not APIKeyConfig")
31
+ # fmt: off
32
+ def __init__(self,
33
+ config: APIKeyAuthProviderConfig,
34
+ config_name: str | None = None) -> None: # pylint: disable=unused-argument
35
+ assert isinstance(config, APIKeyAuthProviderConfig), ("Config is not APIKeyAuthProviderConfig")
33
36
  super().__init__(config)
37
+ # fmt: on
34
38
 
35
39
  async def _construct_authentication_header(self) -> BearerTokenCred:
36
40
  """
@@ -130,7 +130,7 @@ class ObjectStoreRef(ComponentRef):
130
130
  """
131
131
 
132
132
  @property
133
- @typing.override
133
+ @override
134
134
  def component_group(self):
135
135
  return ComponentGroup.OBJECT_STORES
136
136
 
@@ -21,6 +21,7 @@ import typing
21
21
  from enum import Enum
22
22
  from functools import lru_cache
23
23
  from pathlib import Path
24
+ from types import ModuleType
24
25
  from typing import TYPE_CHECKING
25
26
 
26
27
  from pydantic import BaseModel
@@ -115,6 +116,55 @@ class DiscoveryMetadata(BaseModel):
115
116
  return data.get(root_package, None)
116
117
  return None
117
118
 
119
+ @staticmethod
120
+ @lru_cache
121
+ def get_distribution_name_from_module(module: ModuleType | None) -> str:
122
+ """Get the distribution name from a module using the mapping of module names to distro names.
123
+
124
+ Args:
125
+ module (ModuleType): A registered component's module.
126
+
127
+ Returns:
128
+ str: The distribution name of the AIQ Toolkit component.
129
+ """
130
+ from aiq.runtime.loader import get_all_aiq_entrypoints_distro_mapping
131
+
132
+ if module is None:
133
+ return "aiqtoolkit"
134
+
135
+ # Get the mapping of module names to distro names
136
+ mapping = get_all_aiq_entrypoints_distro_mapping()
137
+ module_package = module.__package__
138
+
139
+ if module_package is None:
140
+ return "aiqtoolkit"
141
+
142
+ # Traverse the module package parts in reverse order to find the distro name
143
+ # This is because the module package is the root package for the AIQ Toolkit component
144
+ # and the distro name is the name of the package that contains the component
145
+ module_package_parts = module_package.split(".")
146
+ for part_idx in range(len(module_package_parts), 0, -1):
147
+ candidate_module_name = ".".join(module_package_parts[0:part_idx])
148
+ candidate_distro_name = mapping.get(candidate_module_name, None)
149
+ if candidate_distro_name is not None:
150
+ return candidate_distro_name
151
+
152
+ return "aiqtoolkit"
153
+
154
+ @staticmethod
155
+ @lru_cache
156
+ def get_distribution_name_from_config_type(config_type: type["TypedBaseModelT"]) -> str:
157
+ """Get the distribution name from the config type using the mapping of module names to distro names.
158
+
159
+ Args:
160
+ config_type (type[TypedBaseModelT]): A registered component's configuration object.
161
+
162
+ Returns:
163
+ str: The distribution name of the AIQ Toolkit component.
164
+ """
165
+ module = inspect.getmodule(config_type)
166
+ return DiscoveryMetadata.get_distribution_name_from_module(module)
167
+
118
168
  @staticmethod
119
169
  @lru_cache
120
170
  def get_distribution_name(root_package: str) -> str:
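
Editorial note: the lookup added above walks the dotted module path from its longest prefix down to its first segment and returns the first prefix present in the entry-point mapping, falling back to "aiqtoolkit". A minimal, self-contained sketch of that resolution, using a hypothetical mapping in place of the one returned by get_all_aiq_entrypoints_distro_mapping:

    def resolve_distro_name(module_name: str, mapping: dict[str, str], default: str = "aiqtoolkit") -> str:
        # Try the longest dotted prefix first, then progressively shorter ones.
        parts = module_name.split(".")
        for end in range(len(parts), 0, -1):
            candidate = ".".join(parts[:end])
            if candidate in mapping:
                return mapping[candidate]
        return default

    # Hypothetical mapping, as might be built from aiq.plugins entry points.
    mapping = {"aiq": "aiqtoolkit", "aiq_langchain": "aiqtoolkit-langchain"}
    print(resolve_distro_name("aiq_langchain.tools.wiki_search", mapping))  # -> aiqtoolkit-langchain
    print(resolve_distro_name("some_unrelated.module", mapping))            # -> aiqtoolkit
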
@@ -123,6 +173,7 @@ class DiscoveryMetadata(BaseModel):
123
173
  root package name 'aiq'. They provide mapping in a metadata file
124
174
  for optimized installation.
125
175
  """
176
+
126
177
  distro_name = DiscoveryMetadata.get_distribution_name_from_private_data(root_package)
127
178
  return distro_name if distro_name else root_package
128
179
 
@@ -142,8 +193,7 @@ class DiscoveryMetadata(BaseModel):
142
193
 
143
194
  try:
144
195
  module = inspect.getmodule(config_type)
145
- root_package: str = module.__package__.split(".")[0]
146
- distro_name = DiscoveryMetadata.get_distribution_name(root_package)
196
+ distro_name = DiscoveryMetadata.get_distribution_name_from_config_type(config_type)
147
197
 
148
198
  if not distro_name:
149
199
  # raise an exception
@@ -187,12 +237,13 @@ class DiscoveryMetadata(BaseModel):
187
237
 
188
238
  try:
189
239
  module = inspect.getmodule(fn)
190
- root_package: str = module.__package__.split(".")[0]
191
- root_package = DiscoveryMetadata.get_distribution_name(root_package)
240
+ distro_name = DiscoveryMetadata.get_distribution_name_from_module(module)
241
+
192
242
  try:
193
- version = importlib.metadata.version(root_package) if root_package != "" else ""
243
+ # version = importlib.metadata.version(root_package) if root_package != "" else ""
244
+ version = importlib.metadata.version(distro_name) if distro_name != "" else ""
194
245
  except importlib.metadata.PackageNotFoundError:
195
- logger.warning("Package metadata not found for %s", root_package)
246
+ logger.warning("Package metadata not found for %s", distro_name)
196
247
  version = ""
197
248
  except Exception as e:
198
249
  logger.exception("Encountered issue extracting module metadata for %s: %s", fn, e, exc_info=True)
@@ -201,7 +252,7 @@ class DiscoveryMetadata(BaseModel):
201
252
  if isinstance(wrapper_type, LLMFrameworkEnum):
202
253
  wrapper_type = wrapper_type.value
203
254
 
204
- return DiscoveryMetadata(package=root_package,
255
+ return DiscoveryMetadata(package=distro_name,
205
256
  version=version,
206
257
  component_type=component_type,
207
258
  component_name=wrapper_type,
@@ -220,7 +271,6 @@ class DiscoveryMetadata(BaseModel):
220
271
  """
221
272
 
222
273
  try:
223
- package_name = DiscoveryMetadata.get_distribution_name(package_name)
224
274
  try:
225
275
  metadata = importlib.metadata.metadata(package_name)
226
276
  description = metadata.get("Summary", "")
@@ -263,12 +313,11 @@ class DiscoveryMetadata(BaseModel):
263
313
 
264
314
  try:
265
315
  module = inspect.getmodule(config_type)
266
- root_package: str = module.__package__.split(".")[0]
267
- root_package = DiscoveryMetadata.get_distribution_name(root_package)
316
+ distro_name = DiscoveryMetadata.get_distribution_name_from_module(module)
268
317
  try:
269
- version = importlib.metadata.version(root_package) if root_package != "" else ""
318
+ version = importlib.metadata.version(distro_name) if distro_name != "" else ""
270
319
  except importlib.metadata.PackageNotFoundError:
271
- logger.warning("Package metadata not found for %s", root_package)
320
+ logger.warning("Package metadata not found for %s", distro_name)
272
321
  version = ""
273
322
  except Exception as e:
274
323
  logger.exception("Encountered issue extracting module metadata for %s: %s", config_type, e, exc_info=True)
@@ -279,7 +328,7 @@ class DiscoveryMetadata(BaseModel):
279
328
 
280
329
  description = generate_config_type_docs(config_type=config_type)
281
330
 
282
- return DiscoveryMetadata(package=root_package,
331
+ return DiscoveryMetadata(package=distro_name,
283
332
  version=version,
284
333
  component_type=component_type,
285
334
  component_name=component_name,
@@ -13,6 +13,8 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
+ import asyncio
17
+
16
18
  from aiq.builder.builder import Builder
17
19
  from aiq.cli.register_workflow import register_object_store
18
20
  from aiq.data_models.object_store import KeyAlreadyExistsError
@@ -37,37 +39,37 @@ class InMemoryObjectStore(ObjectStore):
37
39
  """
38
40
 
39
41
  def __init__(self) -> None:
42
+ self._lock = asyncio.Lock()
40
43
  self._store: dict[str, ObjectStoreItem] = {}
41
44
 
42
45
  @override
43
46
  async def put_object(self, key: str, item: ObjectStoreItem) -> None:
44
- if key in self._store:
45
- raise KeyAlreadyExistsError(key)
46
-
47
- self._store[key] = item
48
- return
47
+ async with self._lock:
48
+ if key in self._store:
49
+ raise KeyAlreadyExistsError(key)
50
+ self._store[key] = item
49
51
 
50
52
  @override
51
53
  async def upsert_object(self, key: str, item: ObjectStoreItem) -> None:
52
- self._store[key] = item
53
- return
54
+ async with self._lock:
55
+ self._store[key] = item
54
56
 
55
57
  @override
56
58
  async def get_object(self, key: str) -> ObjectStoreItem:
57
-
58
- if key not in self._store:
59
- raise NoSuchKeyError(key)
60
-
61
- return self._store[key]
59
+ async with self._lock:
60
+ value = self._store.get(key)
61
+ if value is None:
62
+ raise NoSuchKeyError(key)
63
+ return value
62
64
 
63
65
  @override
64
66
  async def delete_object(self, key: str) -> None:
65
- if key not in self._store:
67
+ try:
68
+ async with self._lock:
69
+ self._store.pop(key)
70
+ except KeyError:
66
71
  raise NoSuchKeyError(key)
67
72
 
68
- self._store.pop(key)
69
- return
70
-
71
73
 
72
74
  @register_object_store(config_type=InMemoryObjectStoreConfig)
73
75
  async def in_memory_object_store(config: InMemoryObjectStoreConfig, builder: Builder):
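
Editorial note: the change above guards every dictionary operation with an asyncio.Lock so that the existence check and the mutation cannot interleave across coroutines. A minimal, self-contained sketch of the same pattern (a stand-in class, not the toolkit's own InMemoryObjectStore):

    import asyncio

    class TinyStore:
        """Stand-in illustrating the lock-guarded dictionary pattern."""

        def __init__(self) -> None:
            self._lock = asyncio.Lock()
            self._data: dict[str, str] = {}

        async def put(self, key: str, value: str) -> None:
            async with self._lock:
                if key in self._data:  # check and insert happen atomically
                    raise KeyError(f"{key} already exists")
                self._data[key] = value

        async def delete(self, key: str) -> None:
            async with self._lock:
                self._data.pop(key)  # raises KeyError if the key is absent

    async def main() -> None:
        store = TinyStore()
        # Concurrent writers serialize on the lock instead of racing on the dict.
        await asyncio.gather(*(store.put(f"k{i}", "v") for i in range(5)))
        await store.delete("k0")

    asyncio.run(main())
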
@@ -20,6 +20,8 @@ import os
20
20
  import subprocess
21
21
  from functools import lru_cache
22
22
 
23
+ from packaging.requirements import Requirement
24
+
23
25
  from aiq.data_models.component import AIQComponentEnum
24
26
  from aiq.data_models.discovery_metadata import DiscoveryMetadata
25
27
  from aiq.registry_handlers.schemas.package import WheelData
@@ -57,6 +59,372 @@ def get_module_name_from_distribution(distro_name: str) -> str | None:
57
59
  return None
58
60
 
59
61
 
62
+ def parse_requirement(requirement: str) -> str:
63
+ """Extract the base package name from a requirement string.
64
+
65
+ This function extracts only the package name, ignoring extras, version specifiers,
66
+ and environment markers.
67
+
68
+ Args:
69
+ requirement (str): A requirement string like 'numpy>=1.20.0' or 'requests[security]~=2.28.0'
70
+
71
+ Returns:
72
+ str: The base package name (e.g., 'numpy' from 'numpy>=1.20.0',
73
+ 'requests' from 'requests[security]~=2.28.0')
74
+ """
75
+ # Handle inline comments by splitting on '#' and taking the first part
76
+ clean_requirement = requirement.split('#')[0].strip()
77
+ if not clean_requirement:
78
+ return ""
79
+
80
+ try:
81
+ parsed = Requirement(clean_requirement)
82
+ return parsed.name.lower()
83
+ except Exception as e:
84
+ logger.warning("Failed to parse requirement '%s': %s. Skipping this dependency.", requirement, e)
85
+ return ""
86
+
87
+
88
+ def resolve_extras_to_packages(package_name: str, extras: list[str]) -> set[str]:
89
+ """Resolve package extras to their actual package dependencies.
90
+
91
+ Args:
92
+ package_name (str): The base package name (e.g., 'aiqtoolkit')
93
+ extras (list[str]): List of extra names (e.g., ['langchain', 'telemetry'])
94
+
95
+ Returns:
96
+ set[str]: Set of additional package names that the extras resolve to
97
+ (e.g., {'aiqtoolkit-langchain', 'aiqtoolkit-opentelemetry', 'aiqtoolkit-phoenix',
98
+ 'aiqtoolkit-weave', 'aiqtoolkit-ragaai'})
99
+ """
100
+ resolved_packages = set()
101
+
102
+ try:
103
+ # Get the distribution metadata for the package
104
+ dist = importlib.metadata.distribution(package_name)
105
+
106
+ # Parse all requirements to find optional dependencies
107
+ requires = dist.requires or []
108
+
109
+ for requirement_str in requires:
110
+ try:
111
+ req = Requirement(requirement_str)
112
+
113
+ # Check if this requirement has a marker that matches our extras
114
+ if req.marker:
115
+ for extra in extras:
116
+ # Try marker evaluation first
117
+ try:
118
+ if req.marker.evaluate({'extra': extra}):
119
+ resolved_packages.add(req.name.lower())
120
+ break
121
+ except Exception:
122
+ # Fallback to simple string check
123
+ marker_str = str(req.marker)
124
+ if f'extra == "{extra}"' in marker_str or f"extra == '{extra}'" in marker_str:
125
+ resolved_packages.add(req.name.lower())
126
+ break
127
+
128
+ except Exception as e:
129
+ logger.warning("Failed to parse requirement '%s' for extras resolution: %s", requirement_str, e)
130
+
131
+ except importlib.metadata.PackageNotFoundError:
132
+ logger.warning("Package '%s' not found for extras resolution", package_name)
133
+ except Exception as e:
134
+ logger.warning("Failed to resolve extras for package '%s': %s", package_name, e)
135
+
136
+ return resolved_packages
137
+
138
+
139
+ def extract_dependencies_with_extras_resolved(pyproject_path: str) -> set[str]:
140
+ """Extract dependency names from pyproject.toml with extras properly resolved.
141
+
142
+ This function not only extracts the base package names but also resolves
143
+ any extras (e.g., package[extra1,extra2]) to their actual package dependencies.
144
+
145
+ Args:
146
+ pyproject_path (str): Path to the pyproject.toml file
147
+
148
+ Returns:
149
+ set[str]: Set of all dependency names including those resolved from extras
150
+
151
+ Example:
152
+ For a dependency like "aiqtoolkit[langchain,telemetry]~=1.2", this will return:
153
+ {'aiqtoolkit', 'aiqtoolkit-langchain', 'aiqtoolkit-opentelemetry', 'aiqtoolkit-phoenix', ...}
154
+
155
+ Raises:
156
+ FileNotFoundError: If the pyproject.toml file doesn't exist
157
+ ValueError: If the file cannot be parsed
158
+ """
159
+ import tomllib
160
+
161
+ if not os.path.exists(pyproject_path):
162
+ raise FileNotFoundError(f"pyproject.toml not found at {pyproject_path}")
163
+
164
+ try:
165
+ with open(pyproject_path, "rb") as f:
166
+ data = tomllib.load(f)
167
+ except Exception as e:
168
+ raise ValueError(f"Failed to parse pyproject.toml: {e}") from e
169
+
170
+ project_data = data.get("project", {})
171
+ all_dependencies = set()
172
+
173
+ def _process_dependency(dep_spec: str):
174
+ """Process a single dependency specification and resolve extras."""
175
+ # Handle inline comments
176
+ clean_req = dep_spec.split('#')[0].strip()
177
+ if not clean_req:
178
+ return
179
+
180
+ try:
181
+ parsed = Requirement(clean_req)
182
+ base_name = parsed.name.lower()
183
+ all_dependencies.add(base_name)
184
+
185
+ # If there are extras, try to resolve them
186
+ if parsed.extras:
187
+ resolved_extras = resolve_extras_to_packages(base_name, list(parsed.extras))
188
+ all_dependencies.update(resolved_extras)
189
+
190
+ except Exception as e:
191
+ logger.warning("Failed to process dependency '%s': %s", dep_spec, e)
192
+
193
+ # Process main dependencies
194
+ for dep_spec in project_data.get("dependencies", []):
195
+ _process_dependency(dep_spec)
196
+
197
+ # Process optional dependencies
198
+ optional_deps = project_data.get("optional-dependencies", {})
199
+ for _group_name, group_deps in optional_deps.items():
200
+ for dep_spec in group_deps:
201
+ _process_dependency(dep_spec)
202
+
203
+ return all_dependencies
204
+
205
+
206
+ @lru_cache
207
+ def get_distributions() -> list[importlib.metadata.Distribution]:
208
+ """Get all installed distributions. This is an expensive operation and should be cached."""
209
+ return list(importlib.metadata.distributions())
210
+
211
+
212
+ def find_distribution_name(name: str) -> str | None:
213
+ """Try to find the correct distribution name for a given package name.
214
+
215
+ Uses dynamic discovery through importlib.metadata to find distributions
216
+ that provide the requested module/package name.
217
+
218
+ Args:
219
+ name (str): Package name to search for.
220
+
221
+ Returns:
222
+ str | None: The correct distribution name if found, None otherwise.
223
+ """
224
+ # First try the name as-is
225
+ try:
226
+ importlib.metadata.distribution(name)
227
+ return name
228
+ except importlib.metadata.PackageNotFoundError:
229
+ pass
230
+
231
+ # Try common case variations
232
+ variations = [
233
+ name.lower(),
234
+ name.upper(),
235
+ name.replace('-', '_'),
236
+ name.replace('_', '-'),
237
+ ]
238
+
239
+ # Try each variation
240
+ for variation in variations:
241
+ if variation != name: # Skip the original name we already tried
242
+ try:
243
+ importlib.metadata.distribution(variation)
244
+ return variation
245
+ except importlib.metadata.PackageNotFoundError:
246
+ continue
247
+
248
+ # Search through all installed distributions to find one that provides this module
249
+ try:
250
+ for dist in get_distributions():
251
+ dist_name = dist.metadata['Name']
252
+
253
+ # Check top-level packages provided by this distribution
254
+ try:
255
+ # Try to get top-level packages from metadata
256
+ top_level_txt = dist.read_text('top_level.txt')
257
+ if top_level_txt:
258
+ top_level_packages = set(top_level_txt.strip().split('\n'))
259
+ if name in top_level_packages:
260
+ return dist_name
261
+ except (FileNotFoundError, AttributeError):
262
+ # top_level.txt doesn't exist, try alternative method
263
+ pass
264
+
265
+ # Fallback: check file paths for top-level modules
266
+ try:
267
+ if hasattr(dist, 'files') and dist.files:
268
+ top_level_from_files = {
269
+ f.parts[0]
270
+ for f in dist.files if len(f.parts) > 0 and not f.parts[0].endswith('.dist-info')
271
+ }
272
+ if name in top_level_from_files:
273
+ return dist_name
274
+ except Exception:
275
+ # Some distributions might not have files info or it might be inaccessible
276
+ continue
277
+
278
+ except Exception as e:
279
+ logger.debug("Error searching distributions for %s: %s", name, e)
280
+
281
+ return None
282
+
283
+
284
+ def get_transitive_dependencies(distribution_names: list[str]) -> dict[str, set[str]]:
285
+ """Get transitive dependencies from a list of Python distribution names.
286
+
287
+ This function recursively resolves all dependencies for the given distribution names,
288
+ returning a mapping of each package to its complete set of transitive dependencies.
289
+ This is useful when publishing plugins with nested dependencies to remote registries,
290
+ ensuring that all dependencies are included in the AIQArtifact's metadata.
291
+
292
+ Args:
293
+ distribution_names (list[str]): List of Python distribution names (package names) to analyze.
294
+
295
+ Returns:
296
+ dict[str, set[str]]: Dictionary mapping each distribution name to its set of transitive dependencies.
297
+ The dependencies include both direct and indirect dependencies.
298
+ """
299
+ result: dict[str, set[str]] = {}
300
+ processing: set[str] = set() # Track packages currently being processed (cycle detection)
301
+ completed: set[str] = set() # Track packages that have been fully processed
302
+
303
+ def _get_dependencies_recursive(dist_name: str, path: set[str]) -> set[str]:
304
+ """Recursively get all dependencies for a distribution.
305
+
306
+ Args:
307
+ dist_name: The distribution name to process
308
+ path: Set of packages in the current dependency path (for cycle detection)
309
+ """
310
+ # If we've already computed this package's dependencies, return them
311
+ if dist_name in completed:
312
+ return result.get(dist_name, set())
313
+
314
+ # If we encounter this package in the current path, we have a cycle
315
+ if dist_name in path:
316
+ logger.debug("Cycle detected in dependency chain: %s", " -> ".join(list(path) + [dist_name]))
317
+ return set()
318
+
319
+ # If we're currently processing this package in another branch, return empty
320
+ # to avoid duplicate work (we'll get the full result when that branch completes)
321
+ if dist_name in processing:
322
+ return set()
323
+
324
+ processing.add(dist_name)
325
+ new_path = path | {dist_name}
326
+ dependencies = set()
327
+
328
+ try:
329
+ dist = importlib.metadata.distribution(dist_name)
330
+ requires = dist.requires or []
331
+
332
+ for requirement in requires:
333
+ # Skip requirements with extra markers (optional dependencies)
334
+ # These should only be included if the extra is explicitly requested
335
+ if 'extra ==' in requirement:
336
+ continue
337
+
338
+ # Parse the requirement to get the package name
339
+ dep_name = parse_requirement(requirement)
340
+
341
+ # Skip self-references and empty names
342
+ if not dep_name or dep_name == dist_name.lower():
343
+ continue
344
+
345
+ dependencies.add(dep_name)
346
+
347
+ # Recursively get dependencies of this dependency
348
+ try:
349
+ transitive_deps = _get_dependencies_recursive(dep_name, new_path)
350
+ dependencies.update(transitive_deps)
351
+ except importlib.metadata.PackageNotFoundError:
352
+ # Check if this is likely a conditional dependency (has markers)
353
+ is_conditional = any(marker in requirement for marker in [
354
+ 'python_version', 'sys_platform', 'platform_system', 'platform_machine', 'implementation_name',
355
+ 'implementation_version'
356
+ ])
357
+
358
+ if is_conditional:
359
+ # This is expected - conditional dependencies aren't always installed
360
+ logger.debug("Conditional dependency %s of %s is not installed: %s",
361
+ dep_name,
362
+ dist_name,
363
+ requirement)
364
+ else:
365
+ # This might be a real issue - a non-conditional dependency is missing
366
+ logger.warning("Dependency %s of %s is not installed", dep_name, dist_name)
367
+ continue
368
+
369
+ except importlib.metadata.PackageNotFoundError:
370
+ # Transitive dependencies that aren't found are usually conditional (platform/version specific)
371
+ # and this is expected behavior
372
+ logger.debug("Distribution %s not found (likely conditional dependency)", dist_name)
373
+ # Don't raise - just return empty dependencies for missing distributions
374
+ finally:
375
+ processing.remove(dist_name)
376
+
377
+ result[dist_name] = dependencies
378
+ completed.add(dist_name)
379
+ return dependencies
380
+
381
+ # Process each distribution name
382
+ for dist_name in distribution_names:
383
+ if dist_name not in completed:
384
+ try:
385
+ _get_dependencies_recursive(dist_name.lower(), set())
386
+ except importlib.metadata.PackageNotFoundError:
387
+ # Try to find the correct distribution name
388
+ correct_name = find_distribution_name(dist_name)
389
+ if correct_name:
390
+ logger.debug("Found distribution '%s' for requested name '%s'", correct_name, dist_name)
391
+ try:
392
+ _get_dependencies_recursive(correct_name.lower(), set())
393
+ # Map the original name to the results of the correct name
394
+ if correct_name.lower() in result:
395
+ result[dist_name] = result[correct_name.lower()]
396
+ continue
397
+ except importlib.metadata.PackageNotFoundError:
398
+ pass
399
+
400
+ logger.error("Distribution %s not found (tried common variations)", dist_name)
401
+ result[dist_name] = set()
402
+
403
+ return result
404
+
405
+
406
+ def get_all_transitive_dependencies(distribution_names: list[str]) -> set[str]:
407
+ """Get all unique transitive dependencies from a list of Python distribution names.
408
+
409
+ Returns a flattened set of all unique dependencies across all the provided distribution names.
410
+ This is useful when publishing plugins with nested dependencies to remote registries,
411
+ ensuring that all dependencies are included in the AIQArtifact's metadata.
412
+
413
+ Args:
414
+ distribution_names: List of Python distribution names (package names) to analyze
415
+
416
+ Returns:
417
+ set[str]: Set of all unique transitive dependency names
418
+ """
419
+ deps_map = get_transitive_dependencies(distribution_names)
420
+ all_deps = set()
421
+
422
+ for deps in deps_map.values():
423
+ all_deps.update(deps)
424
+
425
+ return all_deps
426
+
427
+
60
428
  def build_wheel(package_root: str) -> WheelData:
61
429
  """Builds a Python .whl for the specified package and saves to disk, sets self._whl_path, and returned as bytes.
62
430
 
@@ -67,8 +435,6 @@ def build_wheel(package_root: str) -> WheelData:
67
435
  WheelData: Data model containing a built python wheel and its corresponding metadata.
68
436
  """
69
437
 
70
- import importlib.util
71
- import re
72
438
  import tomllib
73
439
 
74
440
  from pkginfo import Wheel
@@ -83,22 +449,27 @@ def build_wheel(package_root: str) -> WheelData:
83
449
 
84
450
  toml_project: dict = data.get("project", {})
85
451
  toml_project_name = toml_project.get("name", None)
86
-
87
- assert toml_project_name is not None, f"Package name '{toml_project_name}' not found in pyproject.toml"
88
- # replace "aiqtoolkit" substring with "aiq" to get the import name
89
- module_name = get_module_name_from_distribution(toml_project_name)
90
- assert module_name is not None, f"No modules found for package name '{toml_project_name}'"
91
-
92
- assert importlib.util.find_spec(module_name) is not None, (f"Package {module_name} not "
93
- "installed, cannot discover components.")
94
-
95
452
  toml_packages = set(i for i in data.get("project", {}).get("entry-points", {}).get("aiq.plugins", {}))
96
- toml_dependencies = set(
97
- re.search(r"[a-zA-Z][a-zA-Z\d_-]*", package_name).group(0)
98
- for package_name in toml_project.get("dependencies", []))
99
453
 
454
+ # Extract dependencies using the robust requirement parser with extras resolution
455
+ try:
456
+ toml_dependencies = extract_dependencies_with_extras_resolved(pyproject_toml_path)
457
+ logger.debug("Extracted dependencies with extras resolved: %s", toml_dependencies)
458
+ except Exception as e:
459
+ logger.warning("Failed to extract dependencies with extras resolution, falling back to basic extraction: %s", e)
460
+ # Fallback to basic extraction
461
+ toml_dependencies = set()
462
+ for dep_spec in toml_project.get("dependencies", []):
463
+ try:
464
+ dep_name = parse_requirement(dep_spec)
465
+ if dep_name:
466
+ toml_dependencies.add(dep_name)
467
+ except Exception as e:
468
+ logger.warning("Failed to parse dependency '%s': %s", dep_spec, e)
469
+
470
+ toml_dependencies_transitive = get_all_transitive_dependencies(list(toml_dependencies))
100
471
  union_dependencies = toml_dependencies.union(toml_packages)
101
- union_dependencies.add(toml_project_name)
472
+ union_dependencies.update(toml_dependencies_transitive)
102
473
 
103
474
  working_dir = os.getcwd()
104
475
  os.chdir(package_root)
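
Editorial note: the dependency set computed here comes from the two helpers added earlier in this file, extract_dependencies_with_extras_resolved and get_all_transitive_dependencies. A hedged usage sketch, assuming the helpers are importable from aiq.registry_handlers.package_utils and using a placeholder pyproject.toml path:

    from aiq.registry_handlers.package_utils import (
        extract_dependencies_with_extras_resolved,
        get_all_transitive_dependencies,
    )

    # "./pyproject.toml" is a placeholder path to the plugin being packaged.
    direct_deps = extract_dependencies_with_extras_resolved("./pyproject.toml")
    all_deps = direct_deps | get_all_transitive_dependencies(sorted(direct_deps))

    # build_wheel() above folds these, plus the aiq.plugins entry points, into union_dependencies.
    print(sorted(all_deps))
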
@@ -119,16 +490,15 @@ def build_wheel(package_root: str) -> WheelData:
119
490
 
120
491
  whl_version = Wheel(whl_path).version
121
492
 
122
- return WheelData(
123
- package_root=package_root,
124
- package_name=module_name, # should it be module name or distro name here
125
- toml_project=toml_project,
126
- toml_dependencies=toml_dependencies,
127
- toml_aiq_packages=toml_packages,
128
- union_dependencies=union_dependencies,
129
- whl_path=whl_path,
130
- whl_base64=whl_base64,
131
- whl_version=whl_version)
493
+ return WheelData(package_root=package_root,
494
+ package_name=toml_project_name,
495
+ toml_project=toml_project,
496
+ toml_dependencies=toml_dependencies,
497
+ toml_aiq_packages=toml_packages,
498
+ union_dependencies=union_dependencies,
499
+ whl_path=whl_path,
500
+ whl_base64=whl_base64,
501
+ whl_version=whl_version)
132
502
 
133
503
 
134
504
  def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnum, list[dict | DiscoveryMetadata]]:
@@ -155,7 +525,7 @@ def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnu
155
525
  if (wheel_data is not None):
156
526
  registry.register_package(package_name=wheel_data.package_name, package_version=wheel_data.whl_version)
157
527
  for entry_point in aiq_plugins:
158
- package_name = entry_point.module.split('.')[0]
528
+ package_name = entry_point.dist.name
159
529
  if (package_name == wheel_data.package_name):
160
530
  continue
161
531
  if (package_name in wheel_data.union_dependencies):
@@ -163,8 +533,7 @@ def build_package_metadata(wheel_data: WheelData | None) -> dict[AIQComponentEnu
163
533
 
164
534
  else:
165
535
  for entry_point in aiq_plugins:
166
- package_name = entry_point.module.split('.')[0]
167
- registry.register_package(package_name=package_name)
536
+ registry.register_package(package_name=entry_point.dist.name)
168
537
 
169
538
  discovery_metadata = {}
170
539
  for component_type in AIQComponentEnum:
aiq/runtime/loader.py CHANGED
@@ -21,6 +21,7 @@ import time
21
21
  from contextlib import asynccontextmanager
22
22
  from enum import IntFlag
23
23
  from enum import auto
24
+ from functools import lru_cache
24
25
  from functools import reduce
25
26
 
26
27
  from aiq.builder.workflow_builder import WorkflowBuilder
@@ -116,6 +117,7 @@ async def load_workflow(config_file: StrPath, max_concurrency: int = -1):
116
117
  yield AIQSessionManager(workflow.build(), max_concurrency=max_concurrency)
117
118
 
118
119
 
120
+ @lru_cache
119
121
  def discover_entrypoints(plugin_type: PluginTypes):
120
122
  """
121
123
  Discover all the requested plugin types which were registered via an entry point group and return them.
@@ -143,6 +145,25 @@ def discover_entrypoints(plugin_type: PluginTypes):
143
145
  return aiq_plugins
144
146
 
145
147
 
148
+ @lru_cache
149
+ def get_all_aiq_entrypoints_distro_mapping() -> dict[str, str]:
150
+ """
151
+ Get the mapping of all AIQ entry points to their distribution names.
152
+ """
153
+
154
+ mapping = {}
155
+ aiq_entrypoints = discover_entrypoints(PluginTypes.ALL)
156
+ for ep in aiq_entrypoints:
157
+ ep_module_parts = ep.module.split(".")
158
+ current_parts = []
159
+ for part in ep_module_parts:
160
+ current_parts.append(part)
161
+ module_prefix = ".".join(current_parts)
162
+ mapping[module_prefix] = ep.dist.name
163
+
164
+ return mapping
165
+
166
+
146
167
  def discover_and_register_plugins(plugin_type: PluginTypes):
147
168
  """
148
169
  Discover all the requested plugin types which were registered via an entry point group and register them into the
@@ -23,7 +23,6 @@ A secure, containerized Python code execution environment that allows safe execu
23
23
 
24
24
  The Code Execution Sandbox provides:
25
25
  - **Secure code execution** in isolated Docker containers
26
- - **Comprehensive error handling** with detailed stdout/stderr capture
27
26
  - **Multiple input formats** including raw code, dictionary format, and markdown
28
27
  - **Dependency management** with pre-installed libraries
29
28
  - **Flexible configuration** with customizable timeouts and output limits
@@ -0,0 +1,32 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import argparse
17
+ import json
18
+
19
+ from aiq.runtime.loader import get_all_aiq_entrypoints_distro_mapping
20
+
21
+
22
+ def dump_distro_mapping(path: str):
23
+ mapping = get_all_aiq_entrypoints_distro_mapping()
24
+ with open(path, "w", encoding="utf-8") as f:
25
+ json.dump(mapping, f, indent=4)
26
+
27
+
28
+ if __name__ == "__main__":
29
+ parser = argparse.ArgumentParser()
30
+ parser.add_argument("--path", type=str, required=True)
31
+ args = parser.parse_args()
32
+ dump_distro_mapping(args.path)
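
Editorial note: the new aiq/utils/dump_distro_mapping.py (listed in the RECORD below) can be run as a script, e.g. python -m aiq.utils.dump_distro_mapping --path distro_mapping.json, or called directly. A minimal sketch; the output filename is arbitrary:

    from aiq.utils.dump_distro_mapping import dump_distro_mapping

    # Writes the module-prefix -> distribution-name mapping as JSON.
    dump_distro_mapping("distro_mapping.json")
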
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: aiqtoolkit
3
- Version: 1.2.0rc3
3
+ Version: 1.2.0rc4
4
4
  Summary: NVIDIA Agent Intelligence toolkit
5
5
  Author: NVIDIA Corporation
6
6
  Maintainer: NVIDIA Corporation
@@ -20,7 +20,7 @@ aiq/authentication/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7
20
20
  aiq/authentication/interfaces.py,sha256=jfsbVx0MTrxZonyTEH0YFcSJoyFHVkfVHFhm9v3jMrY,3317
21
21
  aiq/authentication/register.py,sha256=dYChd2HRv-uv4m8Ebo2sLfbVnksT8D3jEWA-QT-MzX0,947
22
22
  aiq/authentication/api_key/__init__.py,sha256=GUJrgGtpvyMUCjUBvR3faAdv-tZzbU9W-izgx9aMEQg,680
23
- aiq/authentication/api_key/api_key_auth_provider.py,sha256=08YhKWSS0t5ov1sagLC9DN8xM2Asep8eu5m5yeAzzj8,3947
23
+ aiq/authentication/api_key/api_key_auth_provider.py,sha256=ZclKuexJShF8QVJ2vwhePk2q-VSDsJB4iZn2dkOCK4I,4057
24
24
  aiq/authentication/api_key/api_key_auth_provider_config.py,sha256=U9Rx93XVfEFXzT_fikkE0b3bX4eP8VRB2b52_YjAFjY,5472
25
25
  aiq/authentication/api_key/register.py,sha256=2W7GMWNNa_cZvB1uU4an5kNBrYVBY-kX0tgZ9daAF6Y,1147
26
26
  aiq/authentication/exceptions/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7c,685
@@ -100,10 +100,10 @@ aiq/data_models/api_server.py,sha256=dHlRq_jI8N6dFHiKVJ_y4mMOHKkKP9TPM_A6oc0GSCQ
100
100
  aiq/data_models/authentication.py,sha256=TShP47-z9vIrkV3S4sadm0QF7YUkEWtsToKSBiHS3NI,7349
101
101
  aiq/data_models/common.py,sha256=y_8AiWmTEaMjCMayVaFYddhv2AAou8Pr84isHgGxeUg,5874
102
102
  aiq/data_models/component.py,sha256=_HeHlDmz2fgJlVfmz0tG_yCyes86hQNaQwSWz1FDQvk,1737
103
- aiq/data_models/component_ref.py,sha256=_6DeNWJSHN3GSoAFe0RCQkCJ31_WA9GR3nP6vsxMj7Q,4594
103
+ aiq/data_models/component_ref.py,sha256=LJbi5NzGQuq8QRhNGzAKNK1zfxvYTycELFiBy1DKNeM,4587
104
104
  aiq/data_models/config.py,sha256=I7-9kJqpr6IvqmTU4nnALjDmq8YRlrKhYzq6g7DrJeo,17124
105
105
  aiq/data_models/dataset_handler.py,sha256=liMB3xRohkr4VTMmNWPvWi9qhbhlJQfQK36g5Rknweo,4027
106
- aiq/data_models/discovery_metadata.py,sha256=OcITQc5VeML4bTHurrsMNiK_oB3z7wudMxcyN7LI8pY,12785
106
+ aiq/data_models/discovery_metadata.py,sha256=ycrLQ2HTT4lOFZJGwvvGfXPAFRswygmNUWW0nKFNtHg,14662
107
107
  aiq/data_models/embedder.py,sha256=nPhthEQDtzAMGd8gFRB1ZfJpN5M9DJvv0h28ohHnTmI,1002
108
108
  aiq/data_models/evaluate.py,sha256=WBeABZsIa6W04MPj24SRu4s-ty2PkJ7_4SLojXmj5Pk,4704
109
109
  aiq/data_models/evaluator.py,sha256=bd2njsyQB2t6ClJ66gJiCjYHsQpWZwPD7rsU0J109TI,939
@@ -250,7 +250,7 @@ aiq/memory/models.py,sha256=c5dA7nKHQ4AS1_ptQZcfC_oXO495-ehocnf_qXTE6c8,4319
250
250
  aiq/meta/module_to_distro.json,sha256=1XV7edobFrdDKvsSoynfodXg_hczUWpDrQzGkW9qqEs,28
251
251
  aiq/meta/pypi.md,sha256=N1fvWaio3KhnAw9yigeM-oWaLuT5i_C7U_2UVzyPbks,4386
252
252
  aiq/object_store/__init__.py,sha256=7JInpxFZUdqigaBaXDzUj0KTlv_2tiM-SuTOlSvadkg,847
253
- aiq/object_store/in_memory_object_store.py,sha256=_k_5V3sOZh-S3UtsVaKWPnQaeQc7eKY0-Ui6Tx2uXW4,2453
253
+ aiq/object_store/in_memory_object_store.py,sha256=iMwy_JaA3uZkXL6K5ToaOtarC7UfmBuvRoqmLbuYieo,2644
254
254
  aiq/object_store/interfaces.py,sha256=5NbsE9TccihOf5ScG04hE1eNOaiajOZIUOeK_Kvukk8,2519
255
255
  aiq/object_store/models.py,sha256=yAEa7oZgax7OwObynv0MFCMo43xTGps5xz3SIY6-0VM,1425
256
256
  aiq/object_store/register.py,sha256=M93V17RxBXzGZaAmWboDw-S2XP7lrMEyzdlxXqC0f30,788
@@ -324,7 +324,7 @@ aiq/profiler/inference_optimization/experimental/concurrency_spike_analysis.py,s
324
324
  aiq/profiler/inference_optimization/experimental/prefix_span_analysis.py,sha256=nD46SCVi7zwxdPXRN5c61O58_OgMA2WCfaZdRJRqgP4,16600
325
325
  aiq/registry_handlers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
326
326
  aiq/registry_handlers/metadata_factory.py,sha256=Urr_7mLLpoU0VPjl2Nknl9aZhzaAAwb66ydTGBBrIwc,2778
327
- aiq/registry_handlers/package_utils.py,sha256=WCENHOO55Vhwv9DbLYjR9SMu_ERzxMKX6oO9ywe9cpM,7468
327
+ aiq/registry_handlers/package_utils.py,sha256=KZYtGNEIaG9pyG3gyHyumL6w8jsNF8iD-FWNWgP4oBY,22523
328
328
  aiq/registry_handlers/register.py,sha256=k2gvV0htVpfMdzsRvZGnTZp17JA2Na8sO9ocMaxDSmo,902
329
329
  aiq/registry_handlers/registry_handler_base.py,sha256=BYLH6R9y3pySBDH9HKUR16rM0atFRt594IDyyhgCQVQ,5842
330
330
  aiq/registry_handlers/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -355,7 +355,7 @@ aiq/retriever/nemo_retriever/__init__.py,sha256=GUJrgGtpvyMUCjUBvR3faAdv-tZzbU9W
355
355
  aiq/retriever/nemo_retriever/register.py,sha256=ODV-TZfXzDs1VJHHLdj2kC05odirtlQZSeh9c1zw8AQ,2893
356
356
  aiq/retriever/nemo_retriever/retriever.py,sha256=IvScUr9XuDLiMR__I3QsboLaM52N5D5Qu94qtTOGQw8,6958
357
357
  aiq/runtime/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7c,685
358
- aiq/runtime/loader.py,sha256=drLE-OnrAPjFgWrQ3ZsnhHSbon8GkRsUk0p7Oe8Jn2I,7117
358
+ aiq/runtime/loader.py,sha256=F_CHLj3KR8PBxzqu1sy70JLyNBDFG55bFm9kVNGoSdM,7707
359
359
  aiq/runtime/runner.py,sha256=CqmlVAYfrBh3ml3t2n3V693RaNyxtK9ScWT4S-Isbr8,6365
360
360
  aiq/runtime/session.py,sha256=i1pIqopZCBgGJqVUskKLiBnZYH-lTdMhvFu56dXAU5A,6206
361
361
  aiq/runtime/user_metadata.py,sha256=9EiBc-EEJzOdpf3Q1obHqAdY_kRlJ1T0TVvY0Jonk6o,3692
@@ -370,7 +370,7 @@ aiq/tool/nvidia_rag.py,sha256=9mS3igONo1RywxXNj_ITh2-qD91x1R0f7uhOWMZQX3o,4178
370
370
  aiq/tool/register.py,sha256=Lwl6l_eEzS8LAELxClmniNhhLluRVZFYXhsk2ocQhNg,1491
371
371
  aiq/tool/retriever.py,sha256=DnuU4khpJkd4epDBGQsowDOqDBKFiLQrnyKXgU6IRW8,3724
372
372
  aiq/tool/server_tools.py,sha256=286hTIvX_8pEohmkqmgWTCLkTfMTjuR9v0zivcW17r4,3200
373
- aiq/tool/code_execution/README.md,sha256=GQy3pPOVspFHQdclQCCweIAY2r8rVZ6bXyCY2FcEc6M,4090
373
+ aiq/tool/code_execution/README.md,sha256=QM8Fe8O7a9t2oDbG2d1PTIbddpGM4hhlIg2ZTYUKHY0,4019
374
374
  aiq/tool/code_execution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
375
375
  aiq/tool/code_execution/code_sandbox.py,sha256=ibe6BoYdWzmQWHdVNweVP-QW1WRGirfhvFr0iqm4fI8,10159
376
376
  aiq/tool/code_execution/register.py,sha256=i3MNxCb3lBeeSKMwQeg6X2oUpgUKdEoOw2bqUEIqy1E,3322
@@ -400,6 +400,7 @@ aiq/tool/memory_tools/delete_memory_tool.py,sha256=wdB_I8y-1D1OpNtBi6ZOg36vvNkba
400
400
  aiq/tool/memory_tools/get_memory_tool.py,sha256=-i0Bt5xYeapbbd2wtAgPc0pOv0Dx4jK1-yyHG7YCeQ0,2749
401
401
  aiq/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
402
402
  aiq/utils/debugging_utils.py,sha256=6M4JhbHDNDnfmSRGmHvT5IgEeWSHBore3VngdE_PMqc,1332
403
+ aiq/utils/dump_distro_mapping.py,sha256=5I3IGsmhoW12VZrInxx9CyyVSEtRmhRyo1bSdJJUj-o,1155
403
404
  aiq/utils/log_utils.py,sha256=dZLHt7qFqLlpPqMMFO9UVtSkOpMjFwz9tkmbAfOiNlg,1355
404
405
  aiq/utils/metadata_utils.py,sha256=lGYvc8Gk0az4qZDGeRbVz4L7B_b-Gnjss8JT4goqL5I,2897
405
406
  aiq/utils/optional_imports.py,sha256=jQSVBc2fBSRw-2d6r8cEwvh5-di2EUUPakuuo9QbbwA,4039
@@ -428,10 +429,10 @@ aiq/utils/reactive/base/observer_base.py,sha256=UAlyAY_ky4q2t0P81RVFo2Bs_R7z5Nde
428
429
  aiq/utils/reactive/base/subject_base.py,sha256=Ed-AC6P7cT3qkW1EXjzbd5M9WpVoeN_9KCe3OM3FLU4,2521
429
430
  aiq/utils/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
430
431
  aiq/utils/settings/global_settings.py,sha256=U9TCLdoZsKq5qOVGjREipGVv9e-FlStzqy5zv82_VYk,7454
431
- aiqtoolkit-1.2.0rc3.dist-info/licenses/LICENSE-3rd-party.txt,sha256=8o7aySJa9CBvFshPcsRdJbczzdNyDGJ8b0J67WRUQ2k,183936
432
- aiqtoolkit-1.2.0rc3.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
433
- aiqtoolkit-1.2.0rc3.dist-info/METADATA,sha256=X2SwqPxRdjMtrQ0l2Q9Gg439pHoduJja350zK9gCLc0,21558
434
- aiqtoolkit-1.2.0rc3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
435
- aiqtoolkit-1.2.0rc3.dist-info/entry_points.txt,sha256=iZR3yrf1liXfbcLqn5_pUkLhZyr1bUw_Qh1d2i7gsv4,625
436
- aiqtoolkit-1.2.0rc3.dist-info/top_level.txt,sha256=fo7AzYcNhZ_tRWrhGumtxwnxMew4xrT1iwouDy_f0Kc,4
437
- aiqtoolkit-1.2.0rc3.dist-info/RECORD,,
432
+ aiqtoolkit-1.2.0rc4.dist-info/licenses/LICENSE-3rd-party.txt,sha256=8o7aySJa9CBvFshPcsRdJbczzdNyDGJ8b0J67WRUQ2k,183936
433
+ aiqtoolkit-1.2.0rc4.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
434
+ aiqtoolkit-1.2.0rc4.dist-info/METADATA,sha256=4mgm80zLDxo_rJhad7-nJNv--TYpcFJv5UO3eUDzUZM,21558
435
+ aiqtoolkit-1.2.0rc4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
436
+ aiqtoolkit-1.2.0rc4.dist-info/entry_points.txt,sha256=iZR3yrf1liXfbcLqn5_pUkLhZyr1bUw_Qh1d2i7gsv4,625
437
+ aiqtoolkit-1.2.0rc4.dist-info/top_level.txt,sha256=fo7AzYcNhZ_tRWrhGumtxwnxMew4xrT1iwouDy_f0Kc,4
438
+ aiqtoolkit-1.2.0rc4.dist-info/RECORD,,