Llimona 0.1.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
llimona/__init__.py ADDED
@@ -0,0 +1,4 @@
1
def init():
    # Bootstrap hook: discover every installed addon and register the
    # providers each one contributes.
    from .addons import Addons

    addons = Addons()
    addons.register_all_providers()
llimona/addons.py ADDED
@@ -0,0 +1,150 @@
1
+ from abc import ABC
2
+ from collections.abc import Iterable
3
+ from logging import Logger, getLogger
4
+ from typing import TYPE_CHECKING
5
+
6
+ from llimona.utils import LoggerMixin
7
+
8
+ if TYPE_CHECKING:
9
+ from llimona.id_builders import IdBuilderRegistry
10
+ from llimona.provider_loaders import ProviderLoaderRegistry
11
+ from llimona.providers import ProviderRegistry
12
+ from llimona.sensors import SensorRegistry
13
+
14
+
15
class AddonMetadata(ABC):
    """Describes an installed addon and hooks for registering its components.

    Addon packages expose a subclass of this via the ``llimona.addon``
    entry-point group. Every ``register_*`` hook is optional: the default is a
    no-op, so an addon overrides only the hooks for the component kinds it
    actually provides.

    Attributes:
        name: Unique machine-readable addon identifier.
        display_name: Human-readable name shown in CLI listings.
        description: Short human-readable summary of the addon.
    """

    name: str
    display_name: str
    description: str

    # Registry annotations are quoted because these types are imported only
    # under TYPE_CHECKING; unquoted they raise NameError at import time on
    # interpreters that evaluate annotations eagerly (Python <= 3.13).
    def register_providers(self, registry: 'ProviderRegistry') -> None:  # noqa: B027 # pragma: no cover
        """Register this addon's providers. No-op by default."""

    def register_provider_loaders(self, registry: 'ProviderLoaderRegistry') -> None:  # noqa: B027 # pragma: no cover
        """Register this addon's provider loaders. No-op by default."""

    def register_id_builders(self, registry: 'IdBuilderRegistry') -> None:  # noqa: B027 # pragma: no cover
        """Register this addon's ID builders. No-op by default."""

    def register_sensors(self, registry: 'SensorRegistry') -> None:  # noqa: B027 # pragma: no cover
        """Register this addon's sensors. No-op by default."""
31
+
32
+
33
class Addons(LoggerMixin):
    """Discovers addons from the ``llimona.addon`` entry-point group and
    registers their components into the global registries.

    Registration is idempotent per (addon, component kind): a repeated attempt
    for the same addon is skipped with a debug log line.
    """

    def __init__(self, *, logger: Logger | None = None) -> None:
        super().__init__(logger=logger or getLogger('llimona.addons'))

        # One "already registered" name set per component kind.
        self._provider_addons_registered: set[str] = set()
        self._provider_loader_addons_registered: set[str] = set()
        self._id_builder_addons_registered: set[str] = set()
        self._sensor_addons_registered: set[str] = set()

    def list_available(self) -> Iterable[AddonMetadata]:
        """Yield an AddonMetadata instance for every installed addon.

        Entry points are re-read on each call, so addons installed after
        process start are picked up.
        """
        from importlib.metadata import entry_points

        for entry_point in entry_points(group='llimona.addon'):
            # Each entry point resolves to a metadata class; instantiate it.
            yield entry_point.load()()

    # ---- shared machinery (deduplicates the four per-kind variants) ----

    def _register_once(self, addon: AddonMetadata, registered: set[str], kind: str, register) -> None:
        """Invoke ``register(addon)`` unless ``addon.name`` is already in ``registered``."""
        if addon.name in registered:
            self._logger.debug(f'Addon "{addon.name}" already registered, skipping.')
            return

        self._logger.info(f'Registering {kind} from addon: {addon.name}')
        register(addon)
        registered.add(addon.name)

    def _register_by_name(self, addon_name: str, register_one) -> None:
        """Apply ``register_one`` to the single addon named ``addon_name``.

        Raises:
            ValueError: If no installed addon has that name.
        """
        for addon in self.list_available():
            if addon.name != addon_name:
                continue
            register_one(addon)
            return

        raise ValueError(f'Addon "{addon_name}" not found.')

    # ---- providers -----------------------------------------------------

    # Registry annotations are quoted: these types are imported only under
    # TYPE_CHECKING and would otherwise raise NameError at import time on
    # interpreters that evaluate annotations eagerly.
    def _register_addon_provider(self, addon: AddonMetadata, registry: 'ProviderRegistry'):
        self._register_once(
            addon, self._provider_addons_registered, 'providers', lambda a: a.register_providers(registry)
        )

    def register_all_providers(self):
        """Register providers from every installed addon."""
        from llimona.providers import provider_registry

        for addon in self.list_available():
            self._register_addon_provider(addon, provider_registry)

    def register_addon_provider(self, addon_name: str):
        """Register providers from the addon named ``addon_name`` only."""
        from llimona.providers import provider_registry

        self._register_by_name(addon_name, lambda a: self._register_addon_provider(a, provider_registry))

    # ---- provider loaders ----------------------------------------------

    def _register_addon_provider_loader(self, addon: AddonMetadata, registry: 'ProviderLoaderRegistry'):
        self._register_once(
            addon,
            self._provider_loader_addons_registered,
            'provider loaders',
            lambda a: a.register_provider_loaders(registry),
        )

    def register_all_provider_loaders(self):
        """Register provider loaders from every installed addon."""
        from llimona.provider_loaders import provider_loader_registry

        for addon in self.list_available():
            self._register_addon_provider_loader(addon, provider_loader_registry)

    def register_addon_provider_loader(self, addon_name: str):
        """Register provider loaders from the addon named ``addon_name`` only."""
        from llimona.provider_loaders import provider_loader_registry

        self._register_by_name(addon_name, lambda a: self._register_addon_provider_loader(a, provider_loader_registry))

    # ---- ID builders ---------------------------------------------------

    def _register_addon_id_builder(self, addon: AddonMetadata, registry: 'IdBuilderRegistry'):
        self._register_once(
            addon, self._id_builder_addons_registered, 'ID builders', lambda a: a.register_id_builders(registry)
        )

    def register_all_id_builders(self):
        """Register ID builders from every installed addon."""
        from llimona.id_builders import id_builder_registry

        for addon in self.list_available():
            self._register_addon_id_builder(addon, id_builder_registry)

    def register_addon_id_builder(self, addon_name: str):
        """Register ID builders from the addon named ``addon_name`` only."""
        from llimona.id_builders import id_builder_registry

        self._register_by_name(addon_name, lambda a: self._register_addon_id_builder(a, id_builder_registry))

    # ---- sensors -------------------------------------------------------

    def _register_addon_sensor(self, addon: AddonMetadata, registry: 'SensorRegistry'):
        self._register_once(
            addon, self._sensor_addons_registered, 'sensors', lambda a: a.register_sensors(registry)
        )

    def register_all_sensors(self):
        """Register sensors from every installed addon."""
        from llimona.sensors import sensor_registry

        for addon in self.list_available():
            self._register_addon_sensor(addon, sensor_registry)

    def register_addon_sensor(self, addon_name: str):
        """Register sensors from the addon named ``addon_name`` only."""
        from llimona.sensors import sensor_registry

        self._register_by_name(addon_name, lambda a: self._register_addon_sensor(a, sensor_registry))
llimona/app.py ADDED
@@ -0,0 +1,386 @@
1
+ from abc import ABC
2
+ from collections.abc import AsyncIterable, Awaitable, Callable, Iterable, Sequence
3
+ from functools import lru_cache, reduce
4
+ from logging import Logger, getLogger
5
+ from typing import TYPE_CHECKING, overload
6
+ from weakref import ref
7
+
8
+ from llimona.interfaces.openai.models.api_models import ListModelsRequest, Model, ModelRequest
9
+ from llimona.interfaces.openai.models.api_responses import CreateResponse, DeleteResponse, RetrieveResponse
10
+ from llimona.interfaces.openai.models.events import ResponseStreamEvent
11
+ from llimona.interfaces.openai.models.response import Response
12
+ from llimona.utils import LoggerMixin
13
+
14
+ if TYPE_CHECKING:
15
+ from llimona.context import ActionContext, Actor, Constraint, Context, Origin
16
+ from llimona.id_builders import BaseIdBuilder
17
+ from llimona.providers import BaseProvider
18
+ from llimona.sensors import BaseSensor
19
+
20
+
21
+ class Llimona:
22
+ def __init__(self, providers: Iterable[BaseProvider], id_builder: BaseIdBuilder | None = None) -> None:
23
+ from llimona.id_builders import PlainIdBuilder, PlainIdBuilderDesc
24
+
25
+ self._providers: dict[str, BaseProvider] = {provider.provider.name: provider for provider in providers}
26
+ self._id_builder = id_builder or PlainIdBuilder(desc=PlainIdBuilderDesc())
27
+
28
+ self.openai_responses = OpenAIResponses(app=self)
29
+ self.openai_models = OpenAIModels(app=self)
30
+
31
+ @property
32
+ def id_builder(self) -> BaseIdBuilder:
33
+ return self._id_builder
34
+
35
+ def decompose_model(self, model: str) -> tuple[str, str]:
36
+ return tuple(model.split('/', 1)) # type: ignore
37
+
38
+ def get_provider(self, provider: str) -> BaseProvider:
39
+ return self._providers[provider]
40
+
41
+ def register_provider(self, provider: BaseProvider) -> None:
42
+ assert provider.provider.name not in self._providers, (
43
+ f"Provider with name '{provider.provider.name}' is already registered"
44
+ )
45
+
46
+ self._providers[provider.provider.name] = provider
47
+
48
+ async def validate_actor(self, request_actor_id: str | None, resource_create_actor_id: str | None) -> None:
49
+ if request_actor_id != resource_create_actor_id:
50
+ raise ValueError('Actor ID does not match the expected actor ID.')
51
+
52
+ def build_context[TRequest](
53
+ self,
54
+ request: TRequest,
55
+ *,
56
+ action: ActionContext | None = None,
57
+ origin: Origin | None = None,
58
+ actor: Actor | None = None,
59
+ constraints: list[Constraint] | None = None,
60
+ parent: Context | None = None,
61
+ ) -> Context[TRequest]:
62
+ from llimona.context import Context
63
+
64
+ return Context(
65
+ app=self,
66
+ action=action,
67
+ request=request,
68
+ origin=origin,
69
+ actor=actor,
70
+ constraints=constraints,
71
+ parent=parent,
72
+ )
73
+
74
+
75
+ class BaseService(ABC, LoggerMixin):
76
+ TYPE: str
77
+
78
+ def __init__(
79
+ self,
80
+ app: Llimona,
81
+ *,
82
+ logger: Logger | None = None,
83
+ ) -> None:
84
+ super().__init__(
85
+ logger=logger
86
+ or getLogger(
87
+ f'aicc_proxy.{self.TYPE}',
88
+ ),
89
+ )
90
+
91
+ self._app = ref(app)
92
+
93
+ def _build_context[T](
94
+ self,
95
+ request: T,
96
+ provider: str,
97
+ service_action: str,
98
+ model: str | None = None,
99
+ parent_ctx: Context | None = None,
100
+ constraints: Sequence[Constraint] | None = None,
101
+ ) -> Context[T]:
102
+ from llimona.context import ActionContext, Context
103
+
104
+ if parent_ctx is not None:
105
+ if parent_ctx.app != self.app:
106
+ raise ValueError('Parent context belongs to a different app instance')
107
+ return parent_ctx.create_subcontext(
108
+ ActionContext(provider=provider, service=self.TYPE, service_action=service_action, model=model),
109
+ request,
110
+ constraints=constraints,
111
+ )
112
+ else:
113
+ return Context(
114
+ app=self.app,
115
+ action=ActionContext(provider=provider, service=self.TYPE, service_action=service_action, model=model),
116
+ request=request,
117
+ origin=None,
118
+ actor=None,
119
+ constraints=constraints,
120
+ )
121
+
122
+ @property
123
+ def app(self) -> Llimona:
124
+ app = self._app()
125
+ if app is None:
126
+ raise ReferenceError('The app reference is no longer valid')
127
+ return app
128
+
129
+ @overload
130
+ def apply_sensors[**Params, O](
131
+ self, fn: Callable[Params, Awaitable[O]], action: str, model: str | None = None
132
+ ) -> Callable[Params, Awaitable[O]]: ...
133
+
134
+ @overload
135
+ def apply_sensors[**Params, O](
136
+ self, fn: Callable[Params, AsyncIterable[O]], action: str, model: str | None = None
137
+ ) -> Callable[Params, AsyncIterable[O]]: ...
138
+
139
+ @lru_cache(maxsize=128) # noqa: B019
140
+ def apply_sensors[**Params, O](
141
+ self,
142
+ sensors: Iterable[BaseSensor],
143
+ fn: Callable[Params, Awaitable[O] | AsyncIterable[O]],
144
+ action: str,
145
+ model: str | None = None,
146
+ ) -> Callable[Params, Awaitable[O] | AsyncIterable[O]]:
147
+ return reduce(lambda f, sensor: sensor(f), sensors, fn) # type: ignore
148
+
149
+
150
class OpenAIResponses(BaseService):
    """OpenAI Responses-API-compatible service: create / retrieve / cancel."""

    TYPE = 'openai_responses'

    async def create(
        self,
        request: CreateResponse,
        *,
        parent_ctx: 'Context | None' = None,
        constraints: 'Sequence[Constraint] | None' = None,
    ) -> Response | AsyncIterable[ResponseStreamEvent]:
        """Create a response via the provider encoded in ``request.model``.

        ``request.model`` is ``<provider>/<model>``; only the provider-local
        model name is forwarded to the provider. Returns either a complete
        response or a stream of events, after remapping provider IDs to
        public IDs.
        """
        from llimona.interfaces.openai.mappers import IdMapper

        self._logger.info(f'Creating response for model: {request.model}')

        provider_name, provider_model_name = self.app.decompose_model(request.model)

        with self._build_context(
            request=request.model_copy(update={'model': provider_model_name}),
            provider=provider_name,
            service_action='create',
            model=provider_model_name,
            parent_ctx=parent_ctx,
            constraints=constraints,
        ) as ctx:
            provider = self.app.get_provider(provider_name)
            data = await provider.apply_sensors(
                fn=provider.openai_responses.create,
                service_type=self.TYPE,
                action='create',
                model=provider_model_name,
            )(ctx)

        # NOTE(review): mapping runs after the context exits here, but inside
        # it in retrieve/cancel — preserved as-is; confirm which is intended.
        return await IdMapper(self.app).map_raw_response(provider_name, data, ctx.actor.id if ctx.actor else None)

    async def retrieve(
        self,
        request: RetrieveResponse,
        *,
        parent_ctx: 'Context | None' = None,
        constraints: 'Sequence[Constraint] | None' = None,
    ) -> Response | AsyncIterable[ResponseStreamEvent]:
        """Retrieve a previously created response by its public ID.

        The public ID encodes provider, creating actor and the provider-local
        response ID; the requesting actor must match the creating actor.
        """
        from llimona.interfaces.openai.mappers import IdMapper

        self._logger.info(f'Retrieving response with ID: {request.response_id}')
        provider_name, actor_id, resp_id = await self.app.id_builder.debuild_response_id(request.response_id)

        self._logger.info(
            f'Decomposed ID - Response ID: {resp_id}, Provider Name: {provider_name}, Actor ID: {actor_id}'
        )

        with self._build_context(
            request=request.model_copy(update={'response_id': resp_id}),
            provider=provider_name,
            # Fix: was 'create' (copy-paste from create()); this is retrieve.
            service_action='retrieve',
            model=None,
            parent_ctx=parent_ctx,
            constraints=constraints,
        ) as ctx:
            await self.app.validate_actor(ctx.actor.id if ctx.actor else None, actor_id)

            provider = self.app.get_provider(provider_name)
            data = await provider.apply_sensors(
                fn=provider.openai_responses.retrieve,
                service_type=self.TYPE,
                action='retrieve',
            )(ctx)
            return await IdMapper(self.app).map_raw_response(provider_name, data, ctx.actor.id if ctx.actor else None)

    async def cancel(
        self,
        request: DeleteResponse,
        *,
        parent_ctx: 'Context | None' = None,
        constraints: 'Sequence[Constraint] | None' = None,
    ) -> Response:
        """Cancel a response by its public ID.

        Same ID decomposition and actor check as ``retrieve``.
        """
        from llimona.interfaces.openai.mappers import IdMapper

        self._logger.info(f'Cancelling response with ID: {request.response_id}')

        provider_name, actor_id, resp_id = await self.app.id_builder.debuild_response_id(request.response_id)

        self._logger.info(
            f'Decomposed ID - Response ID: {resp_id}, Provider Name: {provider_name}, Actor ID: {actor_id}'
        )

        with self._build_context(
            request=request.model_copy(update={'response_id': resp_id}),
            provider=provider_name,
            service_action='cancel',
            model=None,
            parent_ctx=parent_ctx,
            constraints=constraints,
        ) as ctx:
            await self.app.validate_actor(ctx.actor.id if ctx.actor else None, actor_id)

            provider = self.app.get_provider(provider_name)
            data = await provider.apply_sensors(
                fn=provider.openai_responses.cancel,
                service_type=self.TYPE,
                action='cancel',
            )(ctx)
            return await IdMapper(self.app).map_response(provider_name, data, ctx.actor.id if ctx.actor else None)
248
+
249
+
250
class OpenAIModels(BaseService):
    """OpenAI Models-API-compatible service: list / retrieve / delete."""

    TYPE = 'openai_models'

    def _list_local_models(self, provider: 'BaseProvider') -> Iterable[Model]:
        """Yield this provider's statically-configured models with public IDs."""
        for model in provider.provider.models:
            yield Model.model_validate(
                {
                    # Public model IDs are '<provider>/<model>'.
                    'id': '/'.join([provider.provider.name, model.name]),
                    'owned_by': provider.provider.owner_id,
                    'created': int(model.created.timestamp()),
                },
            )

    async def list(
        self,
        *,
        provider_name: str | None = None,
        remote: bool = False,
        parent_ctx: 'Context | None' = None,
        constraints: 'Sequence[Constraint] | None' = None,
    ) -> AsyncIterable[Model]:
        """Yield models from one provider (``provider_name``) or from all.

        With ``remote`` false only locally-configured models are listed;
        otherwise each provider's remote openai_models service is queried.
        Providers without an 'openai_models' service are skipped.
        """
        self._logger.info('Listing models...')

        if provider_name is not None:
            self._logger.info(f'Listing models from provider: {provider_name}')
            prov = self.app.get_provider(provider_name)

            if not remote:
                self._logger.info(f'Listing local models from provider: {provider_name}')

                for model in self._list_local_models(prov):
                    yield model
                return

            if not any(s.type == 'openai_models' for s in prov.provider.services):
                self._logger.info(f'Provider {provider_name} does not support openai_models service. Skipping...')
                return

            # NOTE(review): unlike the all-providers branch below, this context
            # is not used as a context manager — failures are recorded via
            # set_exception only. Confirm whether the success path should
            # close/exit it as well.
            ctx = self._build_context(
                request=ListModelsRequest(),
                provider=provider_name,
                service_action='list',
                model=None,
                parent_ctx=parent_ctx,
                constraints=constraints,
            )
            try:
                # Fix: reuse the provider fetched above instead of a second
                # get_provider() lookup mixed inconsistently with `prov`.
                async for model in prov.apply_sensors(
                    fn=prov.openai_models.list,
                    service_type=self.TYPE,
                    action='list',
                    model=None,
                )(ctx):
                    yield model.model_copy(update={'id': '/'.join([provider_name, model.id])})

                return
            except Exception as e:
                ctx.set_exception(e)
                raise

        # All providers: iterate the app's registry directly (private-attribute
        # access; this service lives inside the same package as the app).
        for prov_id, prov in self.app._providers.items():
            self._logger.info(f'Listing models from provider: {prov_id}')
            if not remote:
                self._logger.info(f'Listing local models from provider: {prov_id}')

                for model in self._list_local_models(prov):
                    yield model
                continue

            if not any(s.type == 'openai_models' for s in prov.provider.services):
                self._logger.info(f'Provider {prov_id} does not support openai_models service. Skipping...')
                continue

            with self._build_context(
                request=ListModelsRequest(),
                provider=prov_id,
                service_action='list',
                model=None,
                constraints=constraints,
            ) as ctx:
                async for model in prov.apply_sensors(
                    fn=prov.openai_models.list,
                    service_type=self.TYPE,
                    action='list',
                    model=None,
                )(ctx):
                    yield model.model_copy(update={'id': '/'.join([prov_id, model.id])})

    async def retrieve(
        self, model_name: str, *, parent_ctx: 'Context | None' = None, constraints: 'Sequence[Constraint] | None' = None
    ) -> Model:
        """Retrieve one model; ``model_name`` is ``<provider>/<model>``."""
        self._logger.info(f'Retrieving model with ID: {model_name}')
        provider_name, provider_model_name = model_name.split('/', 1)

        self._logger.info(f'Parsed model name - Provider name: {provider_name}, Model name: {provider_model_name}')

        with self._build_context(
            request=ModelRequest(model_id=provider_model_name),
            provider=provider_name,
            service_action='retrieve',
            model=provider_model_name,
            constraints=constraints,
            parent_ctx=parent_ctx,
        ) as ctx:
            provider = self.app.get_provider(provider_name)
            return await provider.apply_sensors(
                fn=provider.openai_models.retrieve,
                service_type=self.TYPE,
                action='retrieve',
                model=provider_model_name,
            )(ctx)

    async def delete(
        self, model_name: str, *, parent_ctx: 'Context | None' = None, constraints: 'Sequence[Constraint] | None' = None
    ) -> bool:
        """Delete one model; ``model_name`` is ``<provider>/<model>``."""
        self._logger.info(f'Deleting model with ID: {model_name}')

        provider_name, provider_model_name = model_name.split('/', 1)

        self._logger.info(f'Parsed model name - Provider name: {provider_name}, Model name: {provider_model_name}')

        with self._build_context(
            request=ModelRequest(model_id=provider_model_name),
            provider=provider_name,
            service_action='delete',
            model=provider_model_name,
            parent_ctx=parent_ctx,
            constraints=constraints,
        ) as ctx:
            provider = self.app.get_provider(provider_name)
            return await provider.apply_sensors(
                fn=provider.openai_models.delete,
                service_type=self.TYPE,
                action='delete',
                model=provider_model_name,
            )(ctx)
@@ -0,0 +1,64 @@
1
+ import asyncio
2
+ from pathlib import Path
3
+
4
+ import click
5
+
6
+ from llimona.cli.addons import addons
7
+ from llimona.cli.openai import openai
8
+ from llimona.cli.providers import providers
9
+
10
+
11
@click.group
@click.option('--log-stdout', is_flag=True, help='Enable logging to stdout')
@click.option(
    '--log-level',
    type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']),
    help='Set the logging level',
    default='INFO',
)
@click.pass_context
def llimona(ctx: click.Context, log_stdout: bool, log_level: str):
    # Root CLI group: configures logging once for every subcommand.
    import sys
    from logging import CRITICAL, DEBUG, ERROR, INFO, WARNING, StreamHandler, basicConfig, getLogger

    level = {
        'DEBUG': DEBUG,
        'INFO': INFO,
        'WARNING': WARNING,
        'ERROR': ERROR,
        'CRITICAL': CRITICAL,
    }[log_level]

    basicConfig(level=level, format='%(asctime)s - %(created).6f - %(name)s - %(levelname)s - %(message)s')

    if log_stdout:
        # Fix: StreamHandler() defaults to stderr, so --log-stdout previously
        # duplicated output to stderr instead of targeting stdout. basicConfig
        # already installed the stderr handler; this adds stdout on top.
        getLogger().addHandler(StreamHandler(sys.stdout))
+
37
+
38
@llimona.group(invoke_without_command=True)
@click.option(
    '--config-file', type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path), required=True
)
@click.pass_context
def app(ctx: click.Context, config_file: Path):
    # Load the YAML app config, build the app, and stash it on the click
    # context for subcommands to pick up via pass_app.
    import yaml

    from ..config.app import AppBuilder, AppConfig
    from ..config.yaml import ConfigLoader

    # Resolve relative paths in the config against the config file's directory.
    loader = ConfigLoader.with_cwd(config_file.parent)
    raw = yaml.load(config_file.read_text(), Loader=loader)
    app_config = AppConfig.model_validate(raw)

    ctx.obj = asyncio.run(AppBuilder(app_config).build())

    if ctx.invoked_subcommand is None:
        ctx.fail('Missing command.')
+
58
+
59
# Wire subcommands: `openai` and `providers` live under the `app` group
# (and therefore require --config-file); `addons` hangs directly off the
# root group since it needs no app configuration.
app.add_command(openai)
app.add_command(providers)
llimona.add_command(addons)

# Allow running this module directly as the CLI entry point.
if __name__ == '__main__':
    llimona()
llimona/cli/addons.py ADDED
@@ -0,0 +1,9 @@
1
+ import click
2
+
3
+
4
@click.command(name='addons')
def addons():
    # Print one line per installed addon: "<name>: <display> - <description>".
    from llimona.addons import Addons

    registry = Addons()
    for meta in registry.list_available():
        line = f'{meta.name}: {meta.display_name} - {meta.description}'
        click.echo(line)
llimona/cli/openai.py ADDED
@@ -0,0 +1,75 @@
1
+ import asyncio
2
+ from collections.abc import AsyncIterable
3
+ from typing import TYPE_CHECKING
4
+
5
+ import click
6
+
7
+ from llimona.cli.utils import pass_app
8
+
9
+ if TYPE_CHECKING:
10
+ from llimona.app import Llimona
11
+
12
+
13
# Parent group for all OpenAI-compatible subcommands (responses, models).
# Intentionally empty: it only provides the command namespace.
@click.group(name='openai')
def openai():
    pass
16
+
17
+
18
# Namespace group for the Responses-API commands.
@openai.group(name='responses')
def responses():
    pass
21
+
22
+
23
@responses.command(name='create')
@click.argument('model', type=str, required=True)
@click.argument('prompt', type=str, required=True)
@click.option('--stream', is_flag=True)
@pass_app
def responses_create(app: Llimona, model: str, prompt: str, stream: bool):
    # Create a response for MODEL from PROMPT, echo the result (or each
    # stream event), then dump any sensor values recorded on the context.
    async def run():
        from llimona.interfaces.openai.models.api_responses import CreateResponse

        ctx = app.build_context(None)
        payload = {'model': model, 'input': prompt, 'stream': stream}
        result = await app.openai_responses.create(CreateResponse.model_validate(payload), parent_ctx=ctx)

        if not isinstance(result, AsyncIterable):
            click.echo(result)
        else:
            async for event in result:
                click.echo(event)

        for sensor_value in ctx.get_sensor_values():
            suffix = f' ({sensor_value.description})' if sensor_value.description else ''
            click.echo(f'Sensor value: {sensor_value.name}={sensor_value.value}' + suffix)

    asyncio.run(run())
49
+
50
+
51
# Namespace group for the Models-API commands.
@openai.group(name='models')
def models():
    pass
54
+
55
+
56
@models.command(name='list')
@click.argument('provider', type=str, required=False)
@click.option(
    '--actor-id',
    type=str,
    required=False,
    help='Actor ID to use for listing models. If not provided, a default actor ID will be used.',
)
@click.option(
    '--remote',
    is_flag=True,
    help='Whether to fetch the model list from the remote provider instead of using cached data',
)
@pass_app
def models_list(app: Llimona, provider: str | None = None, actor_id: str | None = None, remote: bool = False):
    # Print every model, optionally restricted to PROVIDER.
    # NOTE(review): --actor-id is accepted but never forwarded to the service
    # call below — confirm whether openai_models.list should receive it.
    async def run():
        listing = app.openai_models.list(provider_name=provider, remote=remote)
        async for model in listing:
            click.echo(model)

    asyncio.run(run())
+ asyncio.run(action())