guidellm 0.4.0a18__py3-none-any.whl → 0.4.0a155__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of guidellm might be problematic.

Files changed (116)
  1. guidellm/__init__.py +5 -2
  2. guidellm/__main__.py +451 -252
  3. guidellm/backends/__init__.py +33 -0
  4. guidellm/backends/backend.py +110 -0
  5. guidellm/backends/openai.py +355 -0
  6. guidellm/backends/response_handlers.py +455 -0
  7. guidellm/benchmark/__init__.py +53 -39
  8. guidellm/benchmark/benchmarker.py +148 -317
  9. guidellm/benchmark/entrypoints.py +466 -128
  10. guidellm/benchmark/output.py +517 -771
  11. guidellm/benchmark/profile.py +580 -280
  12. guidellm/benchmark/progress.py +568 -549
  13. guidellm/benchmark/scenarios/__init__.py +40 -0
  14. guidellm/benchmark/scenarios/chat.json +6 -0
  15. guidellm/benchmark/scenarios/rag.json +6 -0
  16. guidellm/benchmark/schemas.py +2085 -0
  17. guidellm/data/__init__.py +28 -4
  18. guidellm/data/collators.py +16 -0
  19. guidellm/data/deserializers/__init__.py +53 -0
  20. guidellm/data/deserializers/deserializer.py +109 -0
  21. guidellm/data/deserializers/file.py +222 -0
  22. guidellm/data/deserializers/huggingface.py +94 -0
  23. guidellm/data/deserializers/memory.py +192 -0
  24. guidellm/data/deserializers/synthetic.py +346 -0
  25. guidellm/data/loaders.py +145 -0
  26. guidellm/data/preprocessors/__init__.py +25 -0
  27. guidellm/data/preprocessors/formatters.py +412 -0
  28. guidellm/data/preprocessors/mappers.py +198 -0
  29. guidellm/data/preprocessors/preprocessor.py +29 -0
  30. guidellm/data/processor.py +30 -0
  31. guidellm/data/schemas.py +13 -0
  32. guidellm/data/utils/__init__.py +10 -0
  33. guidellm/data/utils/dataset.py +94 -0
  34. guidellm/data/utils/functions.py +18 -0
  35. guidellm/extras/__init__.py +4 -0
  36. guidellm/extras/audio.py +215 -0
  37. guidellm/extras/vision.py +242 -0
  38. guidellm/logger.py +2 -2
  39. guidellm/mock_server/__init__.py +8 -0
  40. guidellm/mock_server/config.py +84 -0
  41. guidellm/mock_server/handlers/__init__.py +17 -0
  42. guidellm/mock_server/handlers/chat_completions.py +280 -0
  43. guidellm/mock_server/handlers/completions.py +280 -0
  44. guidellm/mock_server/handlers/tokenizer.py +142 -0
  45. guidellm/mock_server/models.py +510 -0
  46. guidellm/mock_server/server.py +168 -0
  47. guidellm/mock_server/utils.py +302 -0
  48. guidellm/preprocess/dataset.py +23 -26
  49. guidellm/presentation/builder.py +2 -2
  50. guidellm/presentation/data_models.py +25 -21
  51. guidellm/presentation/injector.py +2 -3
  52. guidellm/scheduler/__init__.py +65 -26
  53. guidellm/scheduler/constraints.py +1035 -0
  54. guidellm/scheduler/environments.py +252 -0
  55. guidellm/scheduler/scheduler.py +140 -368
  56. guidellm/scheduler/schemas.py +272 -0
  57. guidellm/scheduler/strategies.py +519 -0
  58. guidellm/scheduler/worker.py +391 -420
  59. guidellm/scheduler/worker_group.py +707 -0
  60. guidellm/schemas/__init__.py +31 -0
  61. guidellm/schemas/info.py +159 -0
  62. guidellm/schemas/request.py +216 -0
  63. guidellm/schemas/response.py +119 -0
  64. guidellm/schemas/stats.py +228 -0
  65. guidellm/{config.py → settings.py} +32 -21
  66. guidellm/utils/__init__.py +95 -8
  67. guidellm/utils/auto_importer.py +98 -0
  68. guidellm/utils/cli.py +46 -2
  69. guidellm/utils/console.py +183 -0
  70. guidellm/utils/encoding.py +778 -0
  71. guidellm/utils/functions.py +134 -0
  72. guidellm/utils/hf_datasets.py +1 -2
  73. guidellm/utils/hf_transformers.py +4 -4
  74. guidellm/utils/imports.py +9 -0
  75. guidellm/utils/messaging.py +1118 -0
  76. guidellm/utils/mixins.py +115 -0
  77. guidellm/utils/pydantic_utils.py +411 -0
  78. guidellm/utils/random.py +3 -4
  79. guidellm/utils/registry.py +220 -0
  80. guidellm/utils/singleton.py +133 -0
  81. guidellm/{objects → utils}/statistics.py +341 -247
  82. guidellm/utils/synchronous.py +159 -0
  83. guidellm/utils/text.py +163 -50
  84. guidellm/utils/typing.py +41 -0
  85. guidellm/version.py +1 -1
  86. {guidellm-0.4.0a18.dist-info → guidellm-0.4.0a155.dist-info}/METADATA +33 -10
  87. guidellm-0.4.0a155.dist-info/RECORD +96 -0
  88. guidellm/backend/__init__.py +0 -23
  89. guidellm/backend/backend.py +0 -259
  90. guidellm/backend/openai.py +0 -705
  91. guidellm/backend/response.py +0 -136
  92. guidellm/benchmark/aggregator.py +0 -760
  93. guidellm/benchmark/benchmark.py +0 -837
  94. guidellm/benchmark/scenario.py +0 -104
  95. guidellm/data/prideandprejudice.txt.gz +0 -0
  96. guidellm/dataset/__init__.py +0 -22
  97. guidellm/dataset/creator.py +0 -213
  98. guidellm/dataset/entrypoints.py +0 -42
  99. guidellm/dataset/file.py +0 -92
  100. guidellm/dataset/hf_datasets.py +0 -62
  101. guidellm/dataset/in_memory.py +0 -132
  102. guidellm/dataset/synthetic.py +0 -287
  103. guidellm/objects/__init__.py +0 -18
  104. guidellm/objects/pydantic.py +0 -89
  105. guidellm/request/__init__.py +0 -18
  106. guidellm/request/loader.py +0 -284
  107. guidellm/request/request.py +0 -79
  108. guidellm/request/types.py +0 -10
  109. guidellm/scheduler/queues.py +0 -25
  110. guidellm/scheduler/result.py +0 -155
  111. guidellm/scheduler/strategy.py +0 -495
  112. guidellm-0.4.0a18.dist-info/RECORD +0 -62
  113. {guidellm-0.4.0a18.dist-info → guidellm-0.4.0a155.dist-info}/WHEEL +0 -0
  114. {guidellm-0.4.0a18.dist-info → guidellm-0.4.0a155.dist-info}/entry_points.txt +0 -0
  115. {guidellm-0.4.0a18.dist-info → guidellm-0.4.0a155.dist-info}/licenses/LICENSE +0 -0
  116. {guidellm-0.4.0a18.dist-info → guidellm-0.4.0a155.dist-info}/top_level.txt +0 -0
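
Before the per-file hunks, it is worth noting how the package layout moves between these two alphas. The mapping below is inferred purely from the added and removed files in the list above, not from release notes, so treat the right-hand modules as assumptions to verify against an installed 0.4.0a155 wheel.

    # Module moves inferred from the file list above (left: 0.4.0a18, right: 0.4.0a155):
    #
    #   guidellm.backend.*   ->  guidellm.backends.*  (backend.py, openai.py, response_handlers.py)
    #   guidellm.config      ->  guidellm.settings
    #   guidellm.dataset.*   ->  guidellm.data.deserializers.*  (file, huggingface, memory, synthetic)
    #   guidellm.objects     ->  guidellm.schemas / guidellm.utils  (statistics.py now lives in utils)
    #   guidellm.request.*   ->  guidellm.schemas.request / guidellm.data.loaders
    #
    # Quick import check against an installed 0.4.0a155 wheel:
    import importlib

    for module in ("guidellm.backends", "guidellm.settings", "guidellm.schemas", "guidellm.data"):
        importlib.import_module(module)
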
guidellm/utils/mixins.py ADDED
@@ -0,0 +1,115 @@
+ """
+ Mixin classes for common metadata extraction and object introspection.
+
+ Provides reusable mixins for extracting structured metadata from objects,
+ enabling consistent information exposure across different class hierarchies.
+ """
+
+ from __future__ import annotations
+
+ from typing import Any
+
+ __all__ = ["InfoMixin"]
+
+
+ PYTHON_PRIMITIVES = (str, int, float, bool, list, tuple, dict)
+ """Type alias for serialized object representations"""
+
+
+ class InfoMixin:
+     """
+     Mixin class providing standardized metadata extraction for introspection.
+
+     Enables consistent object metadata extraction patterns across different
+     class hierarchies for debugging, serialization, and runtime analysis.
+     Provides both instance and class-level methods for extracting structured
+     information from arbitrary objects with fallback handling for objects
+     without built-in info capabilities.
+
+     Example:
+     ::
+         from guidellm.utils.mixins import InfoMixin
+
+         class ConfiguredClass(InfoMixin):
+             def __init__(self, setting: str):
+                 self.setting = setting
+
+         obj = ConfiguredClass("value")
+         # Returns {'str': 'ConfiguredClass(...)', 'type': 'ConfiguredClass', ...}
+         print(obj.info)
+     """
+
+     @classmethod
+     def extract_from_obj(cls, obj: Any) -> dict[str, Any]:
+         """
+         Extract structured metadata from any object.
+
+         Attempts to use the object's own `info` method or property if available,
+         otherwise constructs metadata from object attributes and type information.
+         Provides consistent metadata format across different object types.
+
+         :param obj: Object to extract metadata from
+         :return: Dictionary containing object metadata including type, class,
+             module, and public attributes
+         """
+         if hasattr(obj, "info"):
+             return obj.info() if callable(obj.info) else obj.info
+
+         return {
+             "str": str(obj),
+             "type": type(obj).__name__,
+             "class": obj.__class__.__name__ if hasattr(obj, "__class__") else None,
+             "module": obj.__class__.__module__ if hasattr(obj, "__class__") else None,
+             "attributes": (
+                 {
+                     key: val if isinstance(val, PYTHON_PRIMITIVES) else repr(val)
+                     for key, val in obj.__dict__.items()
+                     if not key.startswith("_")
+                 }
+                 if hasattr(obj, "__dict__")
+                 else {}
+             ),
+         }
+
+     @classmethod
+     def create_info_dict(cls, obj: Any) -> dict[str, Any]:
+         """
+         Create a structured info dictionary for the given object.
+
+         Builds standardized metadata dictionary containing object identification,
+         type information, and accessible attributes. Used internally by other
+         info extraction methods and available for direct metadata construction.
+
+         :param obj: Object to extract info from
+         :return: Dictionary containing structured metadata about the object
+         """
+         return {
+             "str": str(obj),
+             "type": type(obj).__name__,
+             "class": obj.__class__.__name__ if hasattr(obj, "__class__") else None,
+             "module": obj.__class__.__module__ if hasattr(obj, "__class__") else None,
+             "attributes": (
+                 {
+                     key: val
+                     if isinstance(val, str | int | float | bool | list | dict)
+                     else repr(val)
+                     for key, val in obj.__dict__.items()
+                     if not key.startswith("_")
+                 }
+                 if hasattr(obj, "__dict__")
+                 else {}
+             ),
+         }
+
+     @property
+     def info(self) -> dict[str, Any]:
+         """
+         Return structured metadata about this instance.
+
+         Provides consistent access to object metadata for debugging, serialization,
+         and introspection. Uses the create_info_dict method to generate standardized
+         metadata format including class information and public attributes.
+
+         :return: Dictionary containing class name, module, and public attributes
+         """
+         return self.create_info_dict(self)
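
The docstring example above covers the `info` property on an `InfoMixin` subclass; `extract_from_obj` also accepts objects that never opted into the mixin. A minimal sketch of both paths (the `Plain` and `Tagged` classes are illustrative only, not part of guidellm):

    from guidellm.utils.mixins import InfoMixin

    class Plain:  # hypothetical object with no `info` attribute
        def __init__(self) -> None:
            self.count = 3
            self._secret = "hidden"  # underscore-prefixed attributes are skipped

    class Tagged(InfoMixin):  # hypothetical subclass using the mixin directly
        def __init__(self, name: str) -> None:
            self.name = name

    # Falls back to building the dict from type info and public attributes
    print(InfoMixin.extract_from_obj(Plain()))
    # {'str': '<...Plain object at ...>', 'type': 'Plain', 'class': 'Plain',
    #  'module': '__main__', 'attributes': {'count': 3}}

    # Uses the `info` property, which delegates to create_info_dict(self)
    print(Tagged("demo").info)
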
guidellm/utils/pydantic_utils.py ADDED
@@ -0,0 +1,411 @@
+ """
+ Pydantic utilities for polymorphic model serialization and registry integration.
+
+ Provides integration between Pydantic and the registry system, enabling
+ polymorphic serialization and deserialization of Pydantic models using
+ a discriminator field and dynamic class registry. Includes base model classes
+ with standardized configurations and generic status breakdown models for
+ structured result organization.
+ """
+
+ from __future__ import annotations
+
+ from abc import ABC, abstractmethod
+ from typing import Any, ClassVar, Generic, TypeVar, cast, get_args, get_origin
+
+ from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler
+ from pydantic_core import CoreSchema, core_schema
+
+ from guidellm.utils.registry import RegistryMixin
+
+ __all__ = [
+     "PydanticClassRegistryMixin",
+     "ReloadableBaseModel",
+     "StandardBaseDict",
+     "StandardBaseModel",
+     "StatusBreakdown",
+ ]
+
+
+ BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
+ RegisterClassT = TypeVar("RegisterClassT", bound=type)
+ SuccessfulT = TypeVar("SuccessfulT")
+ ErroredT = TypeVar("ErroredT")
+ IncompleteT = TypeVar("IncompleteT")
+ TotalT = TypeVar("TotalT")
+
+
+ class ReloadableBaseModel(BaseModel):
+     """
+     Base Pydantic model with schema reloading capabilities.
+
+     Provides dynamic schema rebuilding functionality for models that need to
+     update their validation schemas at runtime, particularly useful when
+     working with registry-based polymorphic models where new types are
+     registered after initial class definition.
+     """
+
+     model_config = ConfigDict(
+         extra="ignore",
+         use_enum_values=True,
+         from_attributes=True,
+         arbitrary_types_allowed=True,
+     )
+
+     @classmethod
+     def reload_schema(cls, parents: bool = True) -> None:
+         """
+         Reload the class schema with updated registry information.
+
+         Forces a complete rebuild of the Pydantic model schema to incorporate
+         any changes made to associated registries or validation rules.
+
+         :param parents: Whether to also rebuild schemas for any pydantic parent
+             types that reference this model.
+         """
+         cls.model_rebuild(force=True)
+
+         if parents:
+             cls.reload_parent_schemas()
+
+     @classmethod
+     def reload_parent_schemas(cls):
+         """
+         Recursively reload schemas for all parent Pydantic models.
+
+         Traverses the inheritance hierarchy to find all parent classes that
+         are Pydantic models and triggers schema rebuilding on each to ensure
+         that any changes in child models are reflected in parent schemas.
+         """
+         potential_parents: set[type[BaseModel]] = {BaseModel}
+         stack: list[type[BaseModel]] = [BaseModel]
+
+         while stack:
+             current = stack.pop()
+             for subclass in current.__subclasses__():
+                 if (
+                     issubclass(subclass, BaseModel)
+                     and subclass is not cls
+                     and subclass not in potential_parents
+                 ):
+                     potential_parents.add(subclass)
+                     stack.append(subclass)
+
+         for check in cls.__mro__:
+             if isinstance(check, type) and issubclass(check, BaseModel):
+                 cls._reload_schemas_depending_on(check, potential_parents)
+
+     @classmethod
+     def _reload_schemas_depending_on(cls, target: type[BaseModel], types: set[type]):
+         changed = True
+         while changed:
+             changed = False
+             for candidate in types:
+                 if (
+                     isinstance(candidate, type)
+                     and issubclass(candidate, BaseModel)
+                     and any(
+                         cls._uses_type(target, field_info.annotation)
+                         for field_info in candidate.model_fields.values()
+                         if field_info.annotation is not None
+                     )
+                 ):
+                     try:
+                         before = candidate.model_json_schema()
+                     except Exception:  # noqa: BLE001
+                         before = None
+                     candidate.model_rebuild(force=True)
+                     if before is not None:
+                         after = candidate.model_json_schema()
+                         changed |= before != after
+
+     @classmethod
+     def _uses_type(cls, target: type, candidate: type) -> bool:
+         if target is candidate:
+             return True
+
+         origin = get_origin(candidate)
+
+         if origin is None:
+             return isinstance(candidate, type) and issubclass(candidate, target)
+
+         if isinstance(origin, type) and (
+             target is origin or issubclass(origin, target)
+         ):
+             return True
+
+         for arg in get_args(candidate) or []:
+             if isinstance(arg, type) and cls._uses_type(target, arg):
+                 return True
+
+         return False
+
+
+ class StandardBaseModel(BaseModel):
+     """
+     Base Pydantic model with standardized configuration for GuideLLM.
+
+     Provides consistent validation behavior and configuration settings across
+     all Pydantic models in the application, including field validation,
+     attribute conversion, and default value handling.
+
+     Example:
+     ::
+         class MyModel(StandardBaseModel):
+             name: str
+             value: int = 42
+
+         # Access default values
+         default_value = MyModel.get_default("value")  # Returns 42
+     """
+
+     model_config = ConfigDict(
+         extra="ignore",
+         use_enum_values=True,
+         from_attributes=True,
+     )
+
+     @classmethod
+     def get_default(cls: type[BaseModel], field: str) -> Any:
+         """
+         Get default value for a model field.
+
+         :param field: Name of the field to get the default value for
+         :return: Default value of the specified field
+         :raises KeyError: If the field does not exist in the model
+         """
+         return cls.model_fields[field].default
+
+
+ class StandardBaseDict(StandardBaseModel):
+     """
+     Base Pydantic model allowing arbitrary additional fields.
+
+     Extends StandardBaseModel to accept extra fields beyond those explicitly
+     defined in the model schema. Useful for flexible data structures that
+     need to accommodate varying or unknown field sets while maintaining
+     type safety for known fields.
+     """
+
+     model_config = ConfigDict(
+         extra="allow",
+         use_enum_values=True,
+         from_attributes=True,
+         arbitrary_types_allowed=True,
+     )
+
+
+ class StatusBreakdown(BaseModel, Generic[SuccessfulT, ErroredT, IncompleteT, TotalT]):
+     """
+     Generic model for organizing results by processing status.
+
+     Provides structured categorization of results into successful, errored,
+     incomplete, and total status groups. Supports flexible typing for each
+     status category to accommodate different result types while maintaining
+     consistent organization patterns across the application.
+
+     Example:
+     ::
+         from guidellm.utils import StatusBreakdown
+
+         # Define a breakdown for request counts
+         breakdown = StatusBreakdown[int, int, int, int](
+             successful=150,
+             errored=5,
+             incomplete=10,
+             total=165
+         )
+     """
+
+     successful: SuccessfulT = Field(
+         description="Results or metrics for requests with successful completion status",
+         default=None,  # type: ignore[assignment]
+     )
+     errored: ErroredT = Field(
+         description="Results or metrics for requests with error completion status",
+         default=None,  # type: ignore[assignment]
+     )
+     incomplete: IncompleteT = Field(
+         description="Results or metrics for requests with incomplete processing status",
+         default=None,  # type: ignore[assignment]
+     )
+     total: TotalT = Field(
+         description="Aggregated results or metrics combining all status categories",
+         default=None,  # type: ignore[assignment]
+     )
+
+
+ class PydanticClassRegistryMixin(
+     ReloadableBaseModel, RegistryMixin[type[BaseModelT]], ABC, Generic[BaseModelT]
+ ):
+     """
+     Polymorphic Pydantic model mixin enabling registry-based dynamic instantiation.
+
+     Integrates Pydantic validation with the registry system to enable polymorphic
+     serialization and deserialization based on a discriminator field. Automatically
+     instantiates the correct subclass during validation based on registry mappings,
+     providing a foundation for extensible plugin-style architectures.
+
+     Example:
+     ::
+         from speculators.utils import PydanticClassRegistryMixin
+
+         class BaseConfig(PydanticClassRegistryMixin["BaseConfig"]):
+             schema_discriminator: ClassVar[str] = "config_type"
+             config_type: str = Field(description="Configuration type identifier")
+
+             @classmethod
+             def __pydantic_schema_base_type__(cls) -> type["BaseConfig"]:
+                 return BaseConfig
+
+         @BaseConfig.register("database")
+         class DatabaseConfig(BaseConfig):
+             config_type: str = "database"
+             connection_string: str = Field(description="Database connection string")
+
+         # Dynamic instantiation based on discriminator
+         config = BaseConfig.model_validate({
+             "config_type": "database",
+             "connection_string": "postgresql://localhost:5432/db"
+         })
+
+     :cvar schema_discriminator: Field name used for polymorphic type discrimination
+     """
+
+     schema_discriminator: ClassVar[str] = "model_type"
+
+     def __new__(cls, *args, **kwargs):  # noqa: ARG004
+         """
+         Prevent direct instantiation of base classes that use this mixin.
+
+         Only allows instantiation of concrete subclasses, not the base class.
+         """
+         base_type = cls.__pydantic_schema_base_type__()
+         if cls is base_type:
+             raise TypeError(f"only children of '{cls.__name__}' may be instantiated")
+         return super().__new__(cls)
+
+     @classmethod
+     def register_decorator(
+         cls, clazz: RegisterClassT, name: str | list[str] | None = None
+     ) -> RegisterClassT:
+         """
+         Register a Pydantic model class with type validation and schema reload.
+
+         Validates that the class is a proper Pydantic BaseModel subclass before
+         registering it in the class registry. Automatically triggers schema
+         reload to incorporate the new type into polymorphic validation.
+
+         :param clazz: Pydantic model class to register in the polymorphic hierarchy
+         :param name: Registry identifier for the class. Uses class name if None
+         :return: The registered class unchanged for decorator chaining
+         :raises TypeError: If clazz is not a Pydantic BaseModel subclass
+         """
+         if not issubclass(clazz, BaseModel):
+             raise TypeError(
+                 f"Cannot register {clazz.__name__} as it is not a subclass of "
+                 "Pydantic BaseModel"
+             )
+
+         super().register_decorator(clazz, name=name)
+         cls.reload_schema()
+
+         return cast("RegisterClassT", clazz)
+
+     @classmethod
+     def __get_pydantic_core_schema__(
+         cls, source_type: Any, handler: GetCoreSchemaHandler
+     ) -> CoreSchema:
+         """
+         Generate polymorphic validation schema for dynamic type instantiation.
+
+         Creates a tagged union schema that enables Pydantic to automatically
+         instantiate the correct subclass based on the discriminator field value.
+         Falls back to base schema generation when no registry is available.
+
+         :param source_type: Type being processed for schema generation
+         :param handler: Pydantic core schema generation handler
+         :return: Tagged union schema for polymorphic validation or base schema
+         """
+         if source_type == cls.__pydantic_schema_base_type__():
+             if not cls.registry:
+                 return cls.__pydantic_generate_base_schema__(handler)
+
+             choices = {
+                 name: handler(model_class) for name, model_class in cls.registry.items()
+             }
+
+             return core_schema.tagged_union_schema(
+                 choices=choices,
+                 discriminator=cls.schema_discriminator,
+             )
+
+         return handler(cls)
+
+     @classmethod
+     @abstractmethod
+     def __pydantic_schema_base_type__(cls) -> type[BaseModelT]:
+         """
+         Define the base type for polymorphic validation hierarchy.
+
+         Must be implemented by subclasses to specify which type serves as the
+         root of the polymorphic hierarchy for schema generation and validation.
+
+         :return: Base class type for the polymorphic model hierarchy
+         """
+         ...
+
+     @classmethod
+     def __pydantic_generate_base_schema__(
+         cls, handler: GetCoreSchemaHandler
+     ) -> CoreSchema:
+         """
+         Generate fallback schema for polymorphic models without registry.
+
+         Provides a base schema that accepts any valid input when no registry
+         is available for polymorphic validation. Used as fallback during
+         schema generation when the registry has not been populated.
+
+         :param handler: Pydantic core schema generation handler
+         :return: Base CoreSchema that accepts any valid input
+         """
+         return core_schema.any_schema()
+
+     @classmethod
+     def auto_populate_registry(cls) -> bool:
+         """
+         Initialize registry with auto-discovery and reload validation schema.
+
+         Triggers automatic population of the class registry through the parent
+         RegistryMixin functionality and ensures the Pydantic validation schema
+         is updated to include all discovered types for polymorphic validation.
+
+         :return: True if registry was populated, False if already populated
+         :raises ValueError: If called when registry_auto_discovery is disabled
+         """
+         populated = super().auto_populate_registry()
+         cls.reload_schema()
+
+         return populated
+
+     @classmethod
+     def registered_classes(cls) -> tuple[type[BaseModelT], ...]:
+         """
+         Get all registered pydantic classes from the registry.
+
+         Automatically triggers auto-discovery if registry_auto_discovery is enabled
+         to ensure all available implementations are included.
+
+         :return: Tuple of all registered classes including auto-discovered ones
+         :raises ValueError: If called before any objects have been registered
+         """
+         if cls.registry_auto_discovery:
+             cls.auto_populate_registry()
+
+         if cls.registry is None:
+             raise ValueError(
+                 "ClassRegistryMixin.registered_classes() must be called after "
+                 "registering classes with ClassRegistryMixin.register()."
+             )
+
+         return tuple(cls.registry.values())
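
Taken together, PydanticClassRegistryMixin and ReloadableBaseModel let a container model declare a field of the polymorphic base type and still deserialize to the correct subclass once new types are registered. The sketch below builds on the docstring example in the hunk above; Profile, SyncProfile, and Report are illustrative names rather than guidellm classes, and it assumes RegistryMixin.register behaves as that docstring shows (routing through register_decorator, which also calls reload_schema).

    from typing import ClassVar

    from pydantic import BaseModel, Field

    from guidellm.utils.pydantic_utils import PydanticClassRegistryMixin

    class Profile(PydanticClassRegistryMixin["Profile"]):
        schema_discriminator: ClassVar[str] = "profile_type"
        profile_type: str = Field(description="Discriminator value")

        @classmethod
        def __pydantic_schema_base_type__(cls) -> type["Profile"]:
            return Profile

    class Report(BaseModel):
        profile: Profile  # field typed as the polymorphic base

    @Profile.register("sync")
    class SyncProfile(Profile):
        # Registration triggers Profile.reload_schema(), which also rebuilds
        # Report so its `profile` field picks up the new tagged-union choice.
        profile_type: str = "sync"
        rate: float = 1.0

    report = Report.model_validate({"profile": {"profile_type": "sync", "rate": 2.5}})
    assert isinstance(report.profile, SyncProfile)

    # The discriminator survives model_dump, so the round trip restores the subclass
    assert Report.model_validate(report.model_dump()).profile.rate == 2.5
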
guidellm/utils/random.py CHANGED
@@ -1,6 +1,5 @@
  import random
  from collections.abc import Iterator
- from typing import Optional

  __all__ = ["IntegerRangeSampler"]

@@ -9,9 +8,9 @@ class IntegerRangeSampler:
      def __init__(
          self,
          average: int,
-         variance: Optional[int],
-         min_value: Optional[int],
-         max_value: Optional[int],
+         variance: int | None,
+         min_value: int | None,
+         max_value: int | None,
          random_seed: int,
      ):
          self.average = average
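
The IntegerRangeSampler change is purely a typing modernization: typing.Optional[int] becomes the PEP 604 spelling int | None, which is why the from typing import Optional line disappears in the first hunk. The two forms are interchangeable; the bare X | None syntax in evaluated annotations requires Python 3.10 or newer. A small standalone illustration (not taken from guidellm):

    from typing import Optional

    # PEP 604 unions compare equal to the typing.Optional form on Python 3.10+
    assert (int | None) == Optional[int]

    def sample(variance: int | None = None) -> int | None:
        """Same contract as the previous Optional[int] signature."""
        return variance
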