backend.ai-appproxy-common 25.15.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. backend_ai_appproxy_common-25.15.3/MANIFEST.in +1 -0
  2. backend_ai_appproxy_common-25.15.3/PKG-INFO +44 -0
  3. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/VERSION +1 -0
  4. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/__init__.py +3 -0
  5. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/config.py +488 -0
  6. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/defs.py +12 -0
  7. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/etcd.py +30 -0
  8. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/events.py +228 -0
  9. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/exceptions.py +221 -0
  10. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/openapi.py +179 -0
  11. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/py.typed +1 -0
  12. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/templates/error.jinja2 +14 -0
  13. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/types.py +334 -0
  14. backend_ai_appproxy_common-25.15.3/ai/backend/appproxy/common/utils.py +352 -0
  15. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/PKG-INFO +44 -0
  16. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/SOURCES.txt +22 -0
  17. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/dependency_links.txt +1 -0
  18. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/namespace_packages.txt +1 -0
  19. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/not-zip-safe +1 -0
  20. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/requires.txt +11 -0
  21. backend_ai_appproxy_common-25.15.3/backend.ai_appproxy_common.egg-info/top_level.txt +1 -0
  22. backend_ai_appproxy_common-25.15.3/backend_shim.py +31 -0
  23. backend_ai_appproxy_common-25.15.3/setup.cfg +4 -0
  24. backend_ai_appproxy_common-25.15.3/setup.py +63 -0
@@ -0,0 +1 @@
1
+ include *.py
@@ -0,0 +1,44 @@
1
+ Metadata-Version: 2.4
2
+ Name: backend.ai-appproxy-common
3
+ Version: 25.15.3
4
+ Summary: Backend.AI AppProxy Common
5
+ Home-page: https://github.com/lablup/backend.ai
6
+ Author: Lablup Inc. and contributors
7
+ License: Proprietary
8
+ Project-URL: Documentation, https://docs.backend.ai/
9
+ Project-URL: Source, https://github.com/lablup/backend.ai
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: Operating System :: MacOS :: MacOS X
12
+ Classifier: Operating System :: POSIX :: Linux
13
+ Classifier: Programming Language :: Python
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Environment :: No Input/Output (Daemon)
16
+ Classifier: Topic :: Scientific/Engineering
17
+ Classifier: Topic :: Software Development
18
+ Classifier: Development Status :: 5 - Production/Stable
19
+ Classifier: Programming Language :: Python :: 3.13
20
+ Requires-Python: >=3.13,<3.14
21
+ Description-Content-Type: text/markdown
22
+ Requires-Dist: PyYAML~=6.0
23
+ Requires-Dist: aiohttp_cors~=0.8.1
24
+ Requires-Dist: aiohttp~=3.12.15
25
+ Requires-Dist: backend.ai-common==25.15.3
26
+ Requires-Dist: backend.ai-logging==25.15.3
27
+ Requires-Dist: backend.ai-plugin==25.15.3
28
+ Requires-Dist: pydantic[email]~=2.11.3
29
+ Requires-Dist: pyhumps~=3.8.0
30
+ Requires-Dist: redis[hiredis]==4.5.5
31
+ Requires-Dist: types-PyYAML
32
+ Requires-Dist: types-redis
33
+ Dynamic: author
34
+ Dynamic: classifier
35
+ Dynamic: description
36
+ Dynamic: description-content-type
37
+ Dynamic: home-page
38
+ Dynamic: license
39
+ Dynamic: project-url
40
+ Dynamic: requires-dist
41
+ Dynamic: requires-python
42
+ Dynamic: summary
43
+
44
+ # appproxy-common
@@ -0,0 +1,3 @@
1
+ from pathlib import Path
2
+
3
+ __version__ = (Path(__file__).parent / "VERSION").read_text().strip()
@@ -0,0 +1,488 @@
1
+ import enum
2
+ import os
3
+ import pwd
4
+ import types
5
+ import typing
6
+ from dataclasses import dataclass
7
+ from pathlib import Path
8
+ from typing import Annotated, Any
9
+
10
+ from pydantic import (
11
+ BaseModel,
12
+ ByteSize,
13
+ ConfigDict,
14
+ Field,
15
+ GetCoreSchemaHandler,
16
+ GetJsonSchemaHandler,
17
+ )
18
+ from pydantic.json_schema import JsonSchemaValue
19
+ from pydantic_core import PydanticUndefined, core_schema
20
+
21
+ from .types import DigestModType
22
+
23
+ # FIXME: merge majority of common definitions to ai.backend.common when ready
24
+
25
+
26
class BaseSchema(BaseModel):
    """Common pydantic base for all AppProxy config/schema models.

    Allows population by field name or alias, attribute-based (ORM-style)
    construction, and serializes enum members as their values.
    """

    model_config = ConfigDict(
        populate_by_name=True,
        from_attributes=True,
        use_enum_values=True,
    )
32
+
33
+
34
class PermitHashConfig(BaseSchema):
    """Configuration for generating permit hashes (HMAC-style)."""

    # Shared secret fed into the hash; must stay consistent cluster-wide
    # for permits to validate on every node.
    secret: Annotated[
        bytes,
        Field(
            description="Secret string used for creating permit hash.",
            examples=["50M3G00DL00KING53CR3T"],
        ),
    ]
    # Digest algorithm; defaults to SHA-256.
    digest_mod: Annotated[
        DigestModType, Field(description="Hash digest method to use.", default=DigestModType.SHA256)
    ]
45
+
46
+
47
class HostPortPair(BaseSchema):
    """A network endpoint as a host string plus a port number.

    Supports tuple-style indexing (``pair[0]`` -> host, ``pair[1]`` -> port)
    for backward compatibility with code that treats endpoints as 2-tuples.
    """

    host: Annotated[str, Field(examples=["127.0.0.1"])]
    port: Annotated[int, Field(gt=0, lt=65536, examples=[8201])]

    @property
    def host_set_with_protocol(self) -> bool:
        """Return True when ``host`` already embeds an HTTP(S) scheme prefix."""
        # str.startswith accepts a tuple of prefixes; replaces the manual loop.
        return self.host.startswith(("http://", "https://"))

    def __repr__(self) -> str:
        return f"{self.host}:{self.port}"

    def __str__(self) -> str:
        return self.__repr__()

    def __getitem__(self, index: int) -> int | str:
        # Fixed signature: the original declared ``*args`` and indexed into it,
        # which obscured the single-index contract of __getitem__.
        if index == 0:
            return self.host
        elif index == 1:
            return self.port
        else:
            raise KeyError(index)
71
+
72
+
73
class RedisHelperConfig(BaseSchema):
    """Timeout tuning knobs for the Redis helper, in seconds."""

    # Timeout for individual socket operations.
    socket_timeout: Annotated[float, Field(default=5.0)]
    # Timeout for establishing the connection.
    socket_connect_timeout: Annotated[float, Field(default=2.0)]
    # Poll interval while waiting to reconnect after a connection loss.
    reconnect_poll_timeout: Annotated[float, Field(default=0.3)]
77
+
78
+
79
class RedisConfig(BaseSchema):
    """Redis connection settings.

    Either ``addr`` (single server) or ``sentinel`` together with
    ``service_name`` (sentinel-managed cluster) is expected to be set —
    TODO confirm: mutual exclusivity is not validated here.
    """

    addr: Annotated[
        HostPortPair | None,
        Field(
            default=None,
            description="Address and port number of redis server.",
            examples=[HostPortPair(host="127.0.0.1", port=8111)],
        ),
    ]
    sentinel: Annotated[
        list[HostPortPair] | None,
        Field(
            default=None,
            description="List of address/port pair of sentinel servers.",
            examples=[
                [
                    HostPortPair(host="127.0.0.1", port=9503),
                    HostPortPair(host="127.0.0.1", port=9504),
                    HostPortPair(host="127.0.0.1", port=9505),
                ]
            ],
        ),
    ]
    service_name: Annotated[
        str | None, Field(default=None, description="Redis service name.", examples=["bai-service"])
    ]
    password: Annotated[
        str | None, Field(default=None, description="Redis password.", examples=["P@ssw0rd!"])
    ]
    redis_helper_config: Annotated[RedisHelperConfig, Field(default=RedisHelperConfig())]

    def to_dict(self) -> dict[str, Any]:
        """Dump the model, flattening address pairs into ``host:port`` strings.

        ``addr`` becomes a single string and ``sentinel`` a comma-joined
        string — presumably the legacy format expected by downstream
        consumers; verify against callers.
        """
        base = self.model_dump()
        if self.addr:
            base["addr"] = f"{self.addr.host}:{self.addr.port}"
        if self.sentinel:
            base["sentinel"] = ",".join([f"{r.host}:{r.port}" for r in self.sentinel])
        return base
117
+
118
+
119
@dataclass
class UserID:
    """Pydantic-compatible type that validates/normalizes a value into a UID.

    Accepted inputs: an int, a numeric string, a system user name, or None
    (falls back to ``default_uid``).  The special value ``-1`` (int or
    string) resolves to the UID of the current process.
    """

    # Fallback UID used when the incoming value is None.
    default_uid: int | None = None

    @classmethod
    def uid_validator(
        cls,
        value: int | str | None,
    ) -> int:
        # NOTE: validation failures are signalled via assert; under
        # pydantic they surface as ValidationError, but asserts are
        # stripped when Python runs with -O.
        if value is None:
            assert cls.default_uid, "value is None but default_uid not provided"
            return cls.default_uid
        assert isinstance(value, (int, str)), "value must be an integer"
        match value:
            case int():
                if value == -1:
                    # -1 means "the UID of this process".
                    return os.getuid()
                else:
                    return value
            case str():
                try:
                    _value = int(value)
                    if _value == -1:
                        return os.getuid()
                    else:
                        return _value
                except ValueError:
                    # Not numeric: treat the string as a system user name.
                    try:
                        return pwd.getpwnam(value).pw_uid
                    except KeyError:
                        assert False, f"no such user {value} in system"

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: GetCoreSchemaHandler,
    ) -> core_schema.CoreSchema:
        # Accept int-or-str, then funnel through uid_validator.
        schema = core_schema.chain_schema([
            core_schema.union_schema([
                core_schema.int_schema(),
                core_schema.str_schema(),
            ]),
            core_schema.no_info_plain_validator_function(cls.uid_validator),
        ])

        return core_schema.json_or_python_schema(
            json_schema=schema,
            python_schema=core_schema.union_schema([
                # check if it's an instance first before doing any further work
                core_schema.union_schema([
                    core_schema.is_instance_schema(int),
                    core_schema.is_instance_schema(str),
                ]),
                schema,
            ]),
            # Serialize back to a plain int.
            serialization=core_schema.plain_serializer_function_ser_schema(int),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Use the same schema that would be used for `int`
        return handler(
            core_schema.union_schema([
                core_schema.int_schema(),
                core_schema.str_schema(),
            ])
        )
189
+
190
+
191
+ @dataclass
192
+ class GroupID:
193
+ default_gid: int | None = None
194
+
195
+ @classmethod
196
+ def uid_validator(
197
+ cls,
198
+ value: int | str | None,
199
+ ) -> int:
200
+ if value is None:
201
+ assert cls.default_gid, "value is None but default_gid not provided"
202
+ assert isinstance(value, (int, str)), "value must be an integer"
203
+ match value:
204
+ case int():
205
+ if value == -1:
206
+ return os.getgid()
207
+ else:
208
+ return value
209
+ case str():
210
+ try:
211
+ _value = int(value)
212
+ if _value == -1:
213
+ return os.getgid()
214
+ else:
215
+ return _value
216
+ except ValueError:
217
+ try:
218
+ return pwd.getpwnam(value).pw_gid
219
+ except KeyError:
220
+ assert False, f"no such user {value} in system"
221
+
222
+ @classmethod
223
+ def __get_pydantic_core_schema__(
224
+ cls,
225
+ _source_type: Any,
226
+ _handler: GetCoreSchemaHandler,
227
+ ) -> core_schema.CoreSchema:
228
+ schema = core_schema.chain_schema([
229
+ core_schema.union_schema([
230
+ core_schema.int_schema(),
231
+ core_schema.str_schema(),
232
+ ]),
233
+ core_schema.no_info_plain_validator_function(cls.uid_validator),
234
+ ])
235
+
236
+ return core_schema.json_or_python_schema(
237
+ json_schema=schema,
238
+ python_schema=core_schema.union_schema([
239
+ # check if it's an instance first before doing any further work
240
+ core_schema.union_schema([
241
+ core_schema.is_instance_schema(int),
242
+ core_schema.is_instance_schema(str),
243
+ ]),
244
+ schema,
245
+ ]),
246
+ serialization=core_schema.plain_serializer_function_ser_schema(int),
247
+ )
248
+
249
+ @classmethod
250
+ def __get_pydantic_json_schema__(
251
+ cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
252
+ ) -> JsonSchemaValue:
253
+ # Use the same schema that would be used for `int`
254
+ return handler(
255
+ core_schema.union_schema([
256
+ core_schema.int_schema(),
257
+ core_schema.str_schema(),
258
+ ])
259
+ )
260
+
261
+
262
class DebugConfig(BaseSchema):
    """Developer/debugging toggles; everything is off by default."""

    # Master switch for debug mode.
    enabled: Annotated[bool, Field(default=False)]
    # Enable asyncio debug mode.
    asyncio: Annotated[bool, Field(default=False)]
    # Enrich aiomonitor task listings with extra info.
    enhanced_aiomonitor_task_info: Annotated[bool, Field(default=False)]
    # Log every event passing through the event bus.
    log_events: Annotated[bool, Field(default=False)]
    # Periodically log internal statistics.
    log_stats: Annotated[bool, Field(default=False)]
268
+
269
+
270
class SecretConfig(BaseSchema):
    """Shared secrets for token signing and API authentication."""

    jwt_secret: Annotated[
        str,
        Field(
            description="String used for creating JWT signature. Must be identical across every nodes across single AppProxy cluster.",
            examples=["50M3V3RY53CR3T5TR1NG"],
        ),
    ]
    api_secret: Annotated[
        str,
        Field(
            description="API token used for validating requests from AppProxy worker and Backend.AI manager.",
            examples=["50M3TRULY53CR3T5TR1NG"],
        ),
    ]
285
+
286
+
287
class LogLevel(enum.StrEnum):
    """Log severity levels, mirroring the stdlib ``logging`` level names."""

    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
    NOTSET = "NOTSET"
294
+
295
+
296
class LogFormat(enum.StrEnum):
    """Verbosity of the rendered log line format."""

    SIMPLE = "simple"
    VERBOSE = "verbose"
299
+
300
+
301
class LogDriver(enum.StrEnum):
    """Supported log output destinations."""

    CONSOLE = "console"
    LOGSTASH = "logstash"
    FILE = "file"
    GRAYLOG = "graylog"
306
+
307
+
308
class LogstashProtocol(enum.StrEnum):
    """Transport protocols for shipping logs to logstash."""

    ZMQ_PUSH = "zmq.push"
    ZMQ_PUB = "zmq.pub"
    TCP = "tcp"
    UDP = "udp"
313
+
314
+
315
# Default per-package log levels applied when no explicit logging
# configuration overrides them ("" is the root logger).
default_pkg_ns = {"": "WARNING", "ai.backend": "DEBUG", "tests": "DEBUG", "aiohttp": "INFO"}
316
+
317
+
318
class ConsoleLogConfig(BaseSchema):
    """Settings for the console log driver."""

    # None leaves color auto-detection to the logging backend — TODO confirm.
    colored: Annotated[
        bool | None, Field(default=None, description="Opt to print colorized log.", examples=[True])
    ]
    format: Annotated[
        LogFormat, Field(default=LogFormat.VERBOSE, description="Determine verbosity of log.")
    ]
325
+
326
+
327
class FileLogConfig(BaseSchema):
    """Settings for the rotating-file log driver."""

    path: Annotated[Path, Field(description="Path to store log.", examples=["/var/log/backend.ai"])]
    filename: Annotated[str, Field(description="Log file name.", examples=["coordinator.log"])]
    backup_count: Annotated[
        int, Field(description="Number of outdated log files to retain.", default=5)
    ]
    # ByteSize parses human-readable sizes such as "10M".
    rotation_size: Annotated[
        ByteSize, Field(description="Maximum size for a single log file.", default="10M")
    ]
    format: Annotated[
        LogFormat, Field(default=LogFormat.VERBOSE, description="Determine verbosity of log.")
    ]
339
+
340
+
341
class LogstashConfig(BaseSchema):
    """Settings for the logstash log driver."""

    endpoint: Annotated[
        HostPortPair,
        Field(
            description="Connection information of logstash node.",
            examples=[HostPortPair(host="127.0.0.1", port=8001)],
        ),
    ]
    protocol: Annotated[
        LogstashProtocol,
        Field(
            description="Protocol to communicate with logstash server.",
            default=LogstashProtocol.TCP,
        ),
    ]
    ssl_enabled: Annotated[
        bool, Field(description="Use TLS to communicate with logstash server.", default=True)
    ]
    ssl_verify: Annotated[
        bool,
        Field(
            description="Verify validity of TLS certificate when communicating with logstash.",
            default=True,
        ),
    ]
366
+
367
+
368
class GraylogConfig(BaseSchema):
    """Settings for the graylog log driver."""

    host: Annotated[str, Field(description="Graylog hostname.", examples=["127.0.0.1"])]
    port: Annotated[int, Field(description="Graylog server port number.", examples=[8000])]
    level: Annotated[LogLevel, Field(description="Log level.", default=LogLevel.INFO)]
    ssl_verify: Annotated[
        bool,
        Field(
            # Fixed copy-paste: the description previously said "logstash"
            # although this config targets graylog.
            description="Verify validity of TLS certificate when communicating with graylog.",
            default=True,
        ),
    ]
    ca_certs: Annotated[
        str | None,
        Field(
            description="Path to Root CA certificate file.",
            examples=["/etc/ssl/ca.pem"],
            default=None,
        ),
    ]
    keyfile: Annotated[
        str | None,
        Field(
            description="Path to TLS private key file.",
            examples=["/etc/backend.ai/graylog/privkey.pem"],
            default=None,
        ),
    ]
    certfile: Annotated[
        str | None,
        Field(
            description="Path to TLS certificate file.",
            examples=["/etc/backend.ai/graylog/cert.pem"],
            default=None,
        ),
    ]
403
+
404
+
405
class PyroscopeConfig(BaseSchema):
    """Settings passed to the pyroscope profiler client."""

    application_name: str | None = Field(
        description="Pyroscope application name", default=None, examples=["proxy-worker-dev"]
    )
    server_address: str = Field(
        description="Pyroscope server endpoint", examples=["http://localhost:4040"]
    )
    sample_rate: int = Field(default=100, description="Pyroscope sample rate")
    # NOTE(review): description says "default is False" but this Field
    # defaults to True — presumably the text quotes the pyroscope library
    # default; confirm which is intended.
    detect_subprocesses: bool = Field(
        default=True,
        description="detect subprocesses started by the main process; default is False",
    )
    oncpu: bool = Field(default=True, description="report cpu time only; default is True")
    gil_only: bool = Field(
        default=True,
        description="only include traces for threads that are holding on to the Global Interpreter Lock; default is True",
    )
    # NOTE(review): same default-vs-description mismatch as above.
    enable_logging: bool = Field(
        default=True, description="does enable logging facility; default is False"
    )
    # Pydantic v2 deep-copies mutable defaults, so the shared {} is safe here.
    tags: dict[str, str] = Field(
        default={}, description="Pyroscope tags", examples=[{"environment": "dev"}]
    )
428
+
429
+
430
class ProfilingConfig(BaseSchema):
    """Toggles for the memray and pyroscope profilers."""

    enable_memray: bool = Field(default=False, description="Starts a memray live server.")
    memray_output_destination: Path = Field(
        default=Path("./memray-output.bin"),
        description="Path to store memray allocation captures.",
        examples=["/home/bai/proxy-worker/profiles/memray/proxy-worker.bin"],
    )
    enable_pyroscope: bool = Field(
        default=False, description="Allows sending pyroscope telemetry to pyroscope server."
    )
    # Required (in practice) when enable_pyroscope is True — TODO confirm:
    # not cross-validated here.
    pyroscope_config: PyroscopeConfig | None = Field(default=None)
441
+
442
+
443
class Undefined:
    """Sentinel marking "no example value available" in generate_example_json."""

    pass
445
+
446
+
447
class UnsupportedTypeError(RuntimeError):
    """Raised when generate_example_json meets a type it cannot exemplify."""

    pass
449
+
450
+
451
def generate_example_json(
    schema: type[BaseModel] | types.GenericAlias, parent: list[str] = []
) -> dict | list:
    """Recursively build an example JSON document for a pydantic model tree.

    For each field, prefer the first declared ``examples`` entry; otherwise
    recurse into the annotation, falling back to the field default when the
    annotation itself is not exemplifiable.

    :param schema: A BaseModel subclass, a union, or a ``list[...]`` alias.
    :param parent: Accumulated config-key path (read-only; the shared
        mutable default is safe because it is never mutated in place).
    :raises RuntimeError: For generic aliases other than ``list``.
    :raises UnsupportedTypeError: For types with no example strategy.
    """
    if isinstance(schema, types.UnionType):
        # Exemplify the first member of the union only.
        return generate_example_json(typing.get_args(schema)[0], parent=[*parent])
    elif isinstance(schema, types.GenericAlias):
        if typing.get_origin(schema) is not list:
            raise RuntimeError("GenericAlias other than list not supported!")
        return [generate_example_json(typing.get_args(schema)[0], parent=[*parent])]
    elif issubclass(schema, BaseModel):
        res = {}
        for name, info in schema.model_fields.items():
            config_key = [*parent, name]
            assert info.annotation
            alternative_example = Undefined
            if info.examples:
                res[name] = info.examples[0]
            # Sentinels are compared by identity: `!=` could invoke an
            # arbitrary __eq__ defined by a field's default value.
            elif info.default is not PydanticUndefined:
                alternative_example = info.default
            if name not in res:
                try:
                    res[name] = generate_example_json(info.annotation, parent=config_key)
                except RuntimeError:
                    if alternative_example is not Undefined and alternative_example is not None:
                        res[name] = alternative_example
                    else:
                        raise
        return res
    else:
        raise UnsupportedTypeError(str(schema))
481
+
482
+
483
def get_default_redis_key_ttl() -> int:
    """Return the default expiration time (TTL) for Redis keys, in seconds.

    Currently fixed at two days.
    """
    seconds_per_day = 24 * 60 * 60
    return 2 * seconds_per_day
@@ -0,0 +1,12 @@
1
+ from typing import Final
2
+
3
+ from ai.backend.common.types import AgentId
4
+
5
# Event-bus key for anycast (single-consumer) AppProxy events.
APPPROXY_ANYCAST_STREAM_KEY: Final[str] = "events-appproxy"
# Event-bus channel for broadcast (all-consumer) AppProxy events.
APPPROXY_BROADCAST_CHANNEL: Final[str] = "events_all-appproxy"


# Name of the cookie carrying the signed access permit.
PERMIT_COOKIE_NAME: Final[str] = "appproxy_permit"

# Synthetic agent IDs identifying the coordinator/worker on the event bus.
AGENTID_COORDINATOR = AgentId("appproxy-coordinator")
AGENTID_WORKER = AgentId("appproxy-worker")
@@ -0,0 +1,30 @@
1
+ from typing import Any
2
+
3
+ from ai.backend.common.etcd import AsyncEtcd
4
+
5
+
6
class TraefikEtcd(AsyncEtcd):
    """AsyncEtcd variant whose key (de)mangling uses a bare ``{ns}/`` prefix."""

    def _mangle_key(self, k: str) -> str:
        """Prefix *k* with the namespace, normalizing away one leading slash."""
        return f"{self.ns}/{k.removeprefix('/')}"

    def _demangle_key(self, k: bytes | str) -> str:
        """Strip the namespace prefix from *k*, decoding bytes if needed."""
        key = k.decode(self.encoding) if isinstance(k, bytes) else k
        return key.removeprefix(f"{self.ns}/")
19
+
20
+
21
def convert_to_etcd_dict(item: Any) -> dict:
    """Convert nested list values into etcd-style index-keyed dicts.

    etcd exposes a flat key space, so lists are represented as mappings
    from the stringified element index to the element, e.g.
    ``["a", "b"]`` -> ``{"0": "a", "1": "b"}``.  Dicts are traversed
    recursively; scalars are returned unchanged.
    """

    def _convert(obj: Any) -> Any:
        if isinstance(obj, list):
            # BUGFIX: recurse into list elements as well; previously nested
            # dicts/lists inside a list were left unconverted, and the
            # comprehension variable shadowed the outer `item` argument.
            return {str(idx): _convert(elem) for idx, elem in enumerate(obj)}
        elif isinstance(obj, dict):
            return {k: _convert(v) for k, v in obj.items()}
        else:
            return obj

    return _convert(item)