pyapi-service-kit 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyapi_service_kit-0.0.1/LICENSE +21 -0
- pyapi_service_kit-0.0.1/PKG-INFO +25 -0
- pyapi_service_kit-0.0.1/README.md +3 -0
- pyapi_service_kit-0.0.1/pyproject.toml +34 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/__init__.py +0 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/db/__init__.py +0 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/db/generic.py +51 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/db/testing.py +49 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/logging.yaml +20 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/__init__.py +0 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/core.py +44 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/jetstream.py +28 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/kv.py +28 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/nats_payload.py +48 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/nats/tasks.py +142 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/__init__.py +0 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/guid.py +18 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/initalisation.py +68 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/nested_enum.py +22 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/templated_enum.py +118 -0
- pyapi_service_kit-0.0.1/src/pyapi_service_kit/utils/time.py +22 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Jayshan Raghunandan
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: pyapi-service-kit
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: some utilities used by backend python services
|
|
5
|
+
License: LICENSE
|
|
6
|
+
Author: jr200
|
|
7
|
+
Author-email: jayshan+git@gmail.com
|
|
8
|
+
Requires-Python: >=3.12,<4.0
|
|
9
|
+
Classifier: License :: Other/Proprietary License
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Requires-Dist: nats-py (>=2.10.0,<3.0.0)
|
|
14
|
+
Requires-Dist: nkeys (>=0.2.1,<0.3.0)
|
|
15
|
+
Requires-Dist: polars (>=1.30.0,<2.0.0)
|
|
16
|
+
Requires-Dist: polars-hist-db (>=0.8.0)
|
|
17
|
+
Requires-Dist: python-dateutil (>=2.9.0.post0,<3.0.0)
|
|
18
|
+
Requires-Dist: pytz (>=2025.2,<2026.0)
|
|
19
|
+
Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
|
|
20
|
+
Description-Content-Type: text/markdown
|
|
21
|
+
|
|
22
|
+
# pyapi-service-kit (wip)
|
|
23
|
+
|
|
24
|
+
Collection of functions, types, utilities used by backend python services.
|
|
25
|
+
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
[project]
name = "pyapi-service-kit"
version = "0.0.1"
description = "some utilities used by backend python services"
# BUG FIX: the previous value "LICENSE" is not a valid license expression,
# which made packaging tools fall back to "Other/Proprietary License" even
# though the bundled LICENSE file is the MIT license.
license = "MIT"
authors = [
    {name = "jr200", email = "jayshan+git@gmail.com"}
]
readme = "README.md"
requires-python = ">=3.12,<4.0"

dependencies = [
    "nats-py (>=2.10.0,<3.0.0)",
    "nkeys (>=0.2.1,<0.3.0)",
    "polars (>=1.30.0,<2.0.0)",
    "pyyaml (>=6.0.2,<7.0.0)",
    "python-dateutil (>=2.9.0.post0,<3.0.0)",
    "pytz (>=2025.2,<2026.0)",
    "polars-hist-db (>=0.8.0)",
]

[tool.poetry]
packages = [{include = "pyapi_service_kit", from = "src"}]

[tool.poetry.group.dev.dependencies]
ruff = "^0.11.13"
mypy = "^1.16.0"
types-pyyaml = "^6.0.12.20250516"
types-pytz = "^2025.2.0.20250516"
types-python-dateutil = "^2.9.0.20250809"

[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Awaitable, Callable
|
|
4
|
+
|
|
5
|
+
from polars import DataFrame
|
|
6
|
+
from polars_hist_db.core import TimeHint
|
|
7
|
+
from sqlalchemy.engine import Connection
|
|
8
|
+
|
|
9
|
+
from nats.aio.msg import Msg
|
|
10
|
+
|
|
11
|
+
from ..nats.nats_payload import NatsPayload
|
|
12
|
+
from ..utils.time import parse_zoned_iso
|
|
13
|
+
|
|
14
|
+
LOGGER = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
QueryFn = Callable[[Connection, TimeHint], Awaitable[DataFrame]]
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def run_query_asof(
    msg: Msg,
    query_fn: QueryFn,
    connection: Connection,
) -> NatsPayload:
    """Run *query_fn* as of the timestamp carried in the NATS request.

    The request body is JSON with an ISO timestamp under ``data.asof_utc``.
    Any failure is reported back to the caller as an ``error`` payload
    instead of being raised.
    """
    try:
        request = json.loads(msg.data)
        asof_utc = parse_zoned_iso(request["data"]["asof_utc"])
        LOGGER.info(f"Running query for {msg.subject} asof {asof_utc.isoformat()}")
        hint = TimeHint(mode="asof", asof_utc=asof_utc)
        return await _run_query_with_time_hint(hint, query_fn, connection)
    except Exception as exc:
        # Surface the failure to the requester rather than crashing the task.
        return NatsPayload(type="error", data=str(exc))
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
async def run_query_simple(
    msg: Msg,
    query_fn: QueryFn,
    connection: Connection,
) -> NatsPayload:
    """Run *query_fn* with no time hint and wrap the result as an IPC payload."""
    LOGGER.info(f"Running query for {msg.subject}")
    return await _run_query_with_time_hint(TimeHint(mode="none"), query_fn, connection)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
async def _run_query_with_time_hint(
    time_hint: TimeHint,
    query_fn: QueryFn,
    connection: Connection,
) -> NatsPayload:
    """Execute *query_fn* against *connection* and wrap the resulting
    DataFrame as an ``ipc`` payload."""
    frame = await query_fn(connection, time_hint)
    return NatsPayload(type="ipc", data=frame)
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import json
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from dateutil.relativedelta import relativedelta
|
|
5
|
+
|
|
6
|
+
from nats.aio.client import Client as NATS
|
|
7
|
+
from nats.aio.msg import Msg
|
|
8
|
+
import polars as pl
|
|
9
|
+
|
|
10
|
+
from ..nats.nats_payload import NatsPayload
|
|
11
|
+
|
|
12
|
+
LOGGER = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
async def simulate_db_update(
    nc: NATS,
    publish_subject: str,
    counter: int,
    initial_time: datetime,
    time_increment: relativedelta,
):
    """Publish a fake DB-update notification (testing helper).

    The fully-qualified table name is taken from the last two tokens of
    *publish_subject*; the simulated event time advances by
    ``time_increment`` per *counter* step.
    """
    fqtn = ".".join(publish_subject.split(".")[-2:])

    event_time = initial_time + time_increment * counter
    response = NatsPayload(
        type="json", data={"action": "update", "asof_utc": event_time.isoformat()}
    )
    LOGGER.info(
        f"Simulating DB table update[{counter}] for {fqtn} at {response.data['asof_utc']}"
    )

    await nc.publish(publish_subject, response.as_bytes())
    LOGGER.info(f"Published DB table update for {fqtn}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
async def simulate_request_reply_json(msg: Msg) -> NatsPayload:
    """Echo a greeting for the request's ``data`` field (testing helper)."""
    decoded = json.loads(msg.data)
    greeting = {"msg": f"Hello '{decoded['data']}'"}
    return NatsPayload(type="json", data=greeting)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
async def simulate_request_reply_ipc(msg: Msg) -> NatsPayload:
    """Build a 10k-row DataFrame echoing the request value (testing helper)."""
    decoded = json.loads(msg.data)

    row_count = 10000
    frame = pl.DataFrame(
        {"a": list(range(row_count)), "b": [decoded["data"]] * row_count}
    )
    return NatsPayload(type="ipc", data=frame)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Logging configuration consumed by logging.config.dictConfig
# (loaded by utils/initalisation.py: initialise_logging).
version: 1
# NOTE(review): true silences any loggers created before dictConfig runs —
# confirm this is intended for library consumers.
disable_existing_loggers: true
formatters:
  simple:
    format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

handlers:
  # Single stdout handler; attached via the root logger below.
  console:
    class: logging.StreamHandler
    formatter: simple
    stream: ext://sys.stdout

loggers:
  # Package logger; inherits the console handler from root.
  pyapi_service_kit:
    level: DEBUG

root:
  level: DEBUG
  handlers:
    - console
|
|
File without changes
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from nats.aio.client import Client as NATS
|
|
3
|
+
from nats.js.client import JetStreamContext
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict
|
|
6
|
+
|
|
7
|
+
LOGGER = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
async def make_nats_client(
    nats_config: Dict[str, Any],
) -> tuple[NATS, JetStreamContext]:
    """Connect to NATS and return the client plus a JetStream context.

    ``nats_config`` must provide ``servers`` (list of URLs) and ``options``
    (extra keyword arguments forwarded to ``NATS.connect``).

    Raises:
        Exception: whatever ``NATS.connect`` raises on failure (logged first).
    """
    nc = NATS()

    async def error_cb(e):
        # BUG FIX: `LOGGER.error("Error:", e)` passed `e` as a %-format
        # argument with no placeholder, which itself raises a logging error;
        # use lazy %-formatting instead.
        LOGGER.error("Error: %s", e)

    async def disconnected_cb():
        LOGGER.warning("Got disconnected!")

    async def reconnected_cb():
        if nc.connected_url:
            LOGGER.info("Got reconnected to %s", nc.connected_url.netloc)

    try:
        await nc.connect(
            servers=nats_config["servers"],
            disconnected_cb=disconnected_cb,
            error_cb=error_cb,
            reconnected_cb=reconnected_cb,
            **nats_config["options"],
        )

        if nc.connected_url:
            LOGGER.info("Connected to NATS at %s", nc.connected_url.netloc)
        else:
            LOGGER.error("Connected to NATS but URL information not available")

        js = nc.jetstream()
        return nc, js
    except Exception as e:
        LOGGER.error(f"Failed to connect to NATS: {e}")
        raise
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import Any, Mapping
|
|
3
|
+
|
|
4
|
+
from nats.js.client import JetStreamContext
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
LOGGER = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
async def try_delete_stream(js: JetStreamContext, stream_name: str):
    """Best-effort stream deletion: True on success, False on any failure."""
    try:
        await js.delete_stream(stream_name)
    except Exception:
        return False
    return True
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def create_jetstream_streams(js: JetStreamContext, config: Mapping[str, Any]):
    """Create every JetStream stream described in *config*.

    Each entry maps a stream name to a dict with ``options`` (kwargs for
    ``add_stream``) and an optional ``recreate_if_exists`` flag.  The first
    failure is logged and re-raised.
    """
    for name, settings in config.items():
        try:
            if settings.get("recreate_if_exists", False):
                # Drop any stale stream first; deletion failures are ignored.
                await try_delete_stream(js, name)

            await js.add_stream(name=name, **settings["options"])
            LOGGER.info(f"JetStream stream {name} created")
        except Exception as e:
            LOGGER.error(f"Error creating JetStream stream {name}: {e}")
            raise
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import Any, Mapping
|
|
3
|
+
|
|
4
|
+
from nats.js.client import JetStreamContext
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
LOGGER = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
async def _try_delete_key_value_bucket(js: JetStreamContext, bucket_name: str):
    """Best-effort KV bucket deletion: True on success, False on any failure."""
    try:
        await js.delete_key_value(bucket_name)
    except Exception:
        return False
    return True
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def create_nats_key_value_bucket(js: JetStreamContext, config: Mapping[str, Any]):
    """Create every KV bucket described in *config*.

    Each entry maps a bucket name to a dict with ``options`` (kwargs for
    ``create_key_value``) and an optional ``recreate_if_exists`` flag.
    The first failure is logged and re-raised.
    """
    for bucket, settings in config.items():
        try:
            if settings.get("recreate_if_exists", False):
                # Drop a stale bucket first; deletion failures are ignored.
                await _try_delete_key_value_bucket(js, bucket)

            await js.create_key_value(bucket=bucket, **settings["options"])
            LOGGER.info(f"KV bucket {bucket} created")
        except Exception as e:
            LOGGER.error(f"Error creating KV bucket {bucket}: {e}")
            raise
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import Any, Literal
|
|
6
|
+
|
|
7
|
+
from polars_hist_db.utils import to_ipc_b64
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
|
|
11
|
+
class NatsPayload:
|
|
12
|
+
type: Literal["json", "ipc", "epoch_ms", "error"]
|
|
13
|
+
data: Any
|
|
14
|
+
|
|
15
|
+
def as_bytes(self) -> bytes:
|
|
16
|
+
encodable_result: str | int | bytes
|
|
17
|
+
if self.type == "ipc":
|
|
18
|
+
encodable_result = to_ipc_b64(self.data, "zlib").decode()
|
|
19
|
+
elif self.type == "epoch_ms":
|
|
20
|
+
if isinstance(self.data, datetime):
|
|
21
|
+
encodable_result = int(self.data.timestamp() * 1000)
|
|
22
|
+
else:
|
|
23
|
+
assert isinstance(self.data, int), (
|
|
24
|
+
"Data must be a integer object (milliseconds since epoch)"
|
|
25
|
+
)
|
|
26
|
+
encodable_result = self.data
|
|
27
|
+
else:
|
|
28
|
+
# its a json-encodable object
|
|
29
|
+
encodable_result = self.data
|
|
30
|
+
|
|
31
|
+
json_result = json.dumps(
|
|
32
|
+
{
|
|
33
|
+
"data": encodable_result,
|
|
34
|
+
"type": self.type,
|
|
35
|
+
}
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
result = json_result.encode("utf-8")
|
|
39
|
+
return result
|
|
40
|
+
|
|
41
|
+
def __str__(self) -> str:
|
|
42
|
+
match self.type:
|
|
43
|
+
case "json":
|
|
44
|
+
return f"Response(type={self.type}, len={len(self.data)})"
|
|
45
|
+
case "ipc":
|
|
46
|
+
return f"Response(type={self.type}, rowcount={len(self.data)})"
|
|
47
|
+
case _:
|
|
48
|
+
return f"Response(type={self.type})"
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from datetime import datetime, timedelta
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Any, Callable, Awaitable
|
|
5
|
+
import pytz
|
|
6
|
+
|
|
7
|
+
from nats.aio.client import Client as NATS
|
|
8
|
+
|
|
9
|
+
from .nats_payload import NatsPayload
|
|
10
|
+
|
|
11
|
+
LOGGER = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
async def check_health_task(nc: NATS, _subject: str, counter: int):
    """Periodic health probe: warn when disconnected, and log a healthy
    heartbeat on every 10th tick."""
    if nc.is_connected:
        if counter % 10 == 0:
            LOGGER.info("NATS connection healthy")
        return
    LOGGER.warning("NATS connection lost, attempting to reconnect...")
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
async def subscribe_task(
    nc, listen_subject: str, cb: Callable[..., Awaitable[NatsPayload]]
):
    """Spawn a background task that subscribes *cb* to *listen_subject*.

    The task parks on a long sleep loop after subscribing so the
    subscription stays alive until the task is cancelled; errors are
    logged and re-raised.
    """

    async def _keep_subscribed():
        try:
            await nc.subscribe(listen_subject, cb=cb)
            # Park indefinitely; only cancellation ends the task.
            while True:
                await asyncio.sleep(3600)
        except Exception as e:
            LOGGER.error(f"Error in subscription {listen_subject}: {e}")
            raise

    return asyncio.create_task(_keep_subscribed())
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
async def request_reply_task(
    nc: NATS, api_subject: str, cb: Callable[..., Awaitable[Any]]
):
    """Serve request/reply on *api_subject*.

    Each inbound message is stamped with receive/process timestamps,
    handed to *cb*, and the returned payload's bytes are sent back as
    the reply.  Errors are logged and re-raised.
    """

    def _utc_now_iso() -> str:
        return datetime.now(pytz.timezone("UTC")).isoformat()

    async def request_response_wrapper():
        try:
            LOGGER.info(f"Subscribing to {api_subject}")
            sub = await nc.subscribe(api_subject)
            async for msg in sub.messages:
                # NOTE(review): these headers are set on the *inbound*
                # message only; they are not attached to the reply —
                # confirm that is the intent.
                msg.headers = msg.headers or dict()
                msg.headers["received_ts"] = _utc_now_iso()
                response = await cb(msg)
                msg.headers["processed_ts"] = _utc_now_iso()
                await msg.respond(response.as_bytes())

        except Exception as e:
            LOGGER.error(f"Error in request_reply task {api_subject}: {e}")
            raise

    return asyncio.create_task(request_response_wrapper())
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
async def periodic_publisher_task(
    nc: NATS,
    publish_subject: str,
    timeout: timedelta,
    cb: Callable[..., Awaitable[NatsPayload]],
    *args: Any,
    **kwargs: Any,
) -> asyncio.Task:
    """Invoke *cb* every *timeout*, passing ``(nc, subject, counter, ...)``.

    The counter only advances on successful invocations; failures are
    logged and the loop keeps running.
    """
    interval_s = timeout.total_seconds()

    async def _tick_forever():
        tick = 0
        while True:
            try:
                await cb(nc, publish_subject, tick, *args, **kwargs)
                tick += 1
            except Exception as e:
                LOGGER.error(f"Error in periodic task {cb.__name__}: {e}")
            await asyncio.sleep(interval_s)

    return asyncio.create_task(_tick_forever())
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
async def triggered_js_publish_task(
    nc: NATS,
    listen_subject: str,
    publish_subject: str,
    cb: Callable[..., Awaitable[NatsPayload]],
) -> asyncio.Task:
    """On each message from *listen_subject*, run *cb* and publish the
    resulting payload to the JetStream subject *publish_subject*.

    Handler errors are logged and swallowed so the subscription survives.
    """
    js = nc.jetstream()

    async def _on_message(msg):
        try:
            payload = await cb(msg)
            # Publish to JetStream instead of regular NATS
            await js.publish(publish_subject, payload.as_bytes())
            LOGGER.info(f"Published message to JetStream stream {publish_subject}")
        except Exception as e:
            LOGGER.error(f"Error handling message on {listen_subject}", exc_info=e)

    return await subscribe_task(nc, listen_subject, _on_message)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
async def triggered_kv_put_task(
    nc: NATS,
    listen_subject: str,
    kv_bucket: str,
    key: str,
    cb: Callable[..., Awaitable[NatsPayload]],
) -> asyncio.Task:
    """On each message from *listen_subject*, run *cb* and store the
    resulting payload at ``kv_bucket[key]``.

    Handler errors are logged and swallowed so the subscription survives.
    """
    js = nc.jetstream()

    async def _on_message(msg):
        try:
            payload = await cb(msg)
            bucket = await js.key_value(kv_bucket)
            await bucket.put(key, payload.as_bytes())
            LOGGER.info(f"Updated key-value {kv_bucket}[{key}] {str(payload)}")
        except Exception as e:
            LOGGER.error(f"Error handling message on {listen_subject}", exc_info=e)

    return await subscribe_task(nc, listen_subject, _on_message)
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
async def once_kv_put_task(
    nc: NATS,
    kv_bucket: str,
    key: str,
    cb: Callable[..., Awaitable[NatsPayload]],
) -> None:
    """Run *cb* once and store the resulting payload at ``kv_bucket[key]``.

    Errors are logged and swallowed (best-effort, matching the triggered
    KV task above).

    BUG FIX: the return annotation previously claimed ``asyncio.Task`` but
    the function awaited the work and returned ``None``; the annotation now
    reflects the actual behaviour.
    """
    js = nc.jetstream()

    async def once_task():
        try:
            result = await cb()
            kv = await js.key_value(kv_bucket)
            await kv.put(key, result.as_bytes())
            LOGGER.info(f"Updated key-value {kv_bucket}[{key}]")
        except Exception as e:
            LOGGER.error(f"Error handling message on {kv_bucket}[{key}]", exc_info=e)

    return await once_task()
|
|
File without changes
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import re
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def validate_guid(guid: str) -> str:
    """Validate and normalise a GUID string.

    Strips surrounding whitespace and ensures the remainder consists only
    of ASCII letters, digits, underscores and hyphens.

    Returns:
        The stripped GUID.

    Raises:
        ValueError: if *guid* is falsy, not a string, blank after stripping,
            or contains disallowed characters.
    """
    if not guid or not isinstance(guid, str):
        raise ValueError("GUID must be a non-empty string")

    guid = guid.strip()
    if not guid:
        raise ValueError("GUID cannot be empty or whitespace")

    # BUG FIX: the error message previously claimed dots were allowed,
    # but the pattern has never accepted them.
    if not re.match(r"^[a-zA-Z0-9_-]+$", guid):
        raise ValueError(
            f"GUID contains invalid characters: {guid}. "
            "Only alphanumeric characters, hyphens, and underscores are allowed."
        )

    return guid
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import asyncio
|
|
3
|
+
from importlib.resources import files
|
|
4
|
+
import logging
|
|
5
|
+
import logging.config
|
|
6
|
+
import signal
|
|
7
|
+
import sys
|
|
8
|
+
import yaml
|
|
9
|
+
|
|
10
|
+
ROOT_DIR = ".."
|
|
11
|
+
DEFAULT_CONFIG_FILE = str(files(__package__).joinpath(ROOT_DIR, "config.yaml"))
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    ``-f/--config`` selects the service config (falling back to the packaged
    default) and ``-l/--log-config`` selects the logging config.
    """
    parser = argparse.ArgumentParser(add_help=True)

    parser.add_argument(
        "-f", "--config", help="Path to config file", default=None, dest="CONFIG_FILE"
    )
    parser.add_argument(
        "-l",
        "--log-config",
        help="Path to logger config",
        default=str(files(__package__).joinpath(ROOT_DIR, "logging.yaml")),
        action="store",
        dest="CONFIG_LOG",
    )

    args = parser.parse_args(sys.argv[1:])

    # Fall back to the packaged config when none was supplied.
    if not args.CONFIG_FILE:
        args.CONFIG_FILE = DEFAULT_CONFIG_FILE

    return args
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def initialise_logging(filename: str) -> None:
    """Configure logging from the YAML dictConfig at *filename* and install
    a global excepthook that logs uncaught exceptions.

    A missing config file is reported but not fatal; any other setup error
    is logged as fatal.  Always installs the excepthook, even on failure.
    """
    try:
        with open(filename, "r") as file:
            logging_dict = yaml.safe_load(file)
        logging.config.dictConfig(logging_dict)
    except FileNotFoundError:
        # BUG FIX: the message previously printed the literal text
        # "(unknown)" instead of the offending path.
        print(f"Logger config file not found at {filename}")
    except Exception as e:
        logger = logging.getLogger("UNCAUGHT_EXCEPTION")
        logger.fatal("", exc_info=e)

    # Route uncaught exceptions through logging; let Ctrl-C behave normally.
    def handle_exception(exc_type, exc_value, exc_traceback):
        if issubclass(exc_type, KeyboardInterrupt):
            sys.__excepthook__(exc_type, exc_value, exc_traceback)
            return

        logger = logging.getLogger("UNCAUGHT_EXCEPTION")
        logger.fatal("", exc_info=(exc_type, exc_value, exc_traceback))

    sys.excepthook = handle_exception
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def create_stop_event():
    """Return a future that resolves when SIGINT or SIGTERM is received.

    Must be called from within a running event loop, since the signal
    handlers are registered on it.
    """
    # BUG FIX: asyncio.get_event_loop() is deprecated outside a running loop
    # (Python 3.12+); bind explicitly to the running loop instead.
    loop = asyncio.get_running_loop()
    stop = loop.create_future()

    def _request_stop() -> None:
        # Guard so a second signal does not raise InvalidStateError.
        if not stop.done():
            stop.set_result(None)

    for sig in (signal.SIGINT, signal.SIGTERM):
        loop.add_signal_handler(sig, _request_stop)

    return stop
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from enum import Enum, EnumMeta
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class NestedEnum(Enum):
    """Enum that accepts an Enum class as a member value; that class's
    members are re-exposed as attributes on the member (see ``__init__``)."""

    def __new__(cls, *args):
        obj = object.__new__(cls)
        value = None
        # Single argument: the argument is the member's value.
        if len(args) == 1:
            value = args[0]

        # Two arguments: the first is still used as the value.
        if len(args) == 2:
            value = args[0]

        # NOTE(review): a falsy value (0, "", False) is not assigned here and
        # is left for the Enum machinery to fill in — confirm this is intended.
        if value:
            obj._value_ = value

        return obj

    def __init__(self, items_or_value):
        # When the member's value is itself an Enum class, mirror each of its
        # members onto this member as attributes (e.g. ``A.SUB_MEMBER``).
        if isinstance(items_or_value, EnumMeta):
            for enm in items_or_value:
                self.__setattr__(enm.name, enm)
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from typing import Any, Callable
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class TemplatedEnum(Enum):
    """Enum whose member values are string templates that must be resolved
    through a class-level resolver before use.

    Direct ``.value`` access is blocked; callers use ``.template`` for the
    raw template or ``.resolved`` for the resolver's output.
    """

    def __getattribute__(self, name: str) -> Any:
        # Forbid `.value` so callers cannot bypass the resolver.
        if name == "value":
            raise AttributeError(
                f"Direct access to '{name}' is not allowed. "
                f"Use '{self.name}.resolved' to get the resolved subject or '{self.name}.template' for the template."
            )
        return super().__getattribute__(name)

    @classmethod
    def __getattr__(cls, name: str) -> Any:
        # NOTE(review): __getattr__ declared as a classmethod is unusual;
        # confirm the intended lookup semantics for instances vs. the class.
        if name in cls.__members__:
            raise AttributeError(
                f"Direct access to enum member '{name}' is not allowed. "
                f"Use '{name}.resolved' or '{name}.template' to access the subject."
            )
        raise AttributeError(f"'{cls.__name__}' has no attribute '{name}'")

    def __init__(self, *args: Any) -> None:
        """Validate and store the member's template string."""
        # Enum calls __init__ with (cls, *args) where args[0] is the value
        template = args[0] if args else ""
        if not isinstance(template, str):
            raise TypeError(f"Template must be a string, got {type(template)}")
        if not template.strip():
            raise ValueError("Template cannot be empty or whitespace")
        self._template: str = template

    @classmethod
    def set_resolver(cls, resolver: Callable[[str], Any]) -> None:
        """Install the callable used to turn templates into resolved values."""
        if not callable(resolver):
            raise TypeError("Resolver must be callable")

        setattr(cls, "_resolver", resolver)

    @classmethod
    def remove_resolver(cls) -> None:
        """Remove the resolver function from the class."""
        if hasattr(cls, "_resolver"):
            delattr(cls, "_resolver")

    @property
    def template(self) -> str:
        """The raw, unresolved template string."""
        return self._template

    @property
    def resolved(self) -> Any:
        """The template passed through the installed resolver.

        Raises:
            RuntimeError: if no resolver has been installed.
        """
        if not hasattr(self.__class__, "_resolver"):
            raise RuntimeError(
                f"Resolver not set for {self.__class__.__name__}. Call set_resolver() first."
            )

        result = self.__class__._resolver(self._template)
        return result

    @property
    def raw_value(self) -> str:
        """Access the raw enum value for debugging purposes."""
        return self._value_

    def __str__(self) -> str:
        # Return an error marker rather than raising from __str__.
        try:
            resolved_value = self.resolved
            return str(resolved_value)
        except Exception as e:
            return f"{self.__class__.__name__}.{self.name}[ERROR: {e}]"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}.{self.name}"
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class NestedTemplatedEnum(TemplatedEnum):
    """TemplatedEnum variant whose resolved value also exposes the members
    of a companion enum class (installed via :meth:`set_nested_enum`)."""

    def __init__(self, *args: Any) -> None:
        super().__init__(*args)
        # Store the nested enum class as a class attribute
        # NOTE(review): this assigns an *instance* attribute (None) on each
        # member while set_nested_enum assigns a *class* attribute — confirm
        # the shadowing is intended.
        self._nested_enum = None

    @classmethod
    def set_nested_enum(cls, nested_enum_class):
        """Set the nested enum class for this templated enum."""
        cls._nested_enum = nested_enum_class

    @property
    def resolved(self) -> Any:
        """Resolve the template and wrap the result so that members of the
        nested enum class are reachable as attributes.

        Raises:
            RuntimeError: if no resolver has been installed.
        """
        if not hasattr(self.__class__, "_resolver"):
            raise RuntimeError(
                f"Resolver not set for {self.__class__.__name__}. Call set_resolver() first."
            )

        # Get the resolved base key from the resolver
        resolved_base = self.__class__._resolver(self._template)

        # Return a simple object that combines the base key with enum values
        class ResolvedNested:
            def __init__(self, base_key: str, enum_class):
                self.base_key = base_key
                self.enum_class = enum_class

            def __getattr__(self, name: str):
                # Delegate unknown attributes to the nested enum's members;
                # `.value` falls back to the resolved base key.
                if hasattr(self.enum_class, name):
                    enum_value = getattr(self.enum_class, name)
                    return enum_value
                if name == "value":
                    return self.base_key
                raise AttributeError(
                    f"'{self.enum_class.__name__}' has no attribute '{name}'"
                )

            def __str__(self) -> str:
                return self.base_key

            def __repr__(self) -> str:
                return f"ResolvedNested(base_key='{self.base_key}', enum_class={self.enum_class.__name__})"

        return ResolvedNested(resolved_base, self.__class__._nested_enum)
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from dateutil.parser import isoparse
|
|
3
|
+
import pytz
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def parse_zoned_iso(s: str) -> datetime:
    """Parse an ISO-8601 timestamp, optionally suffixed with a bracketed
    IANA zone id (e.g. ``2024-01-02T03:04:05+00:00[Europe/Paris]``).

    When a zone id is present and resolvable, the parsed datetime is
    converted to that zone; otherwise the offset parsed from the base
    string (if any) is kept.
    """
    # Stdlib-only implementation: datetime.fromisoformat covers the ISO-8601
    # forms used here on Python >= 3.11 (the project requires >= 3.12), and
    # zoneinfo replaces the pytz dependency.
    from zoneinfo import ZoneInfo

    if "[" in s and s.endswith("]"):
        base, _, zone = s.partition("[")
        zone = zone.rstrip("]")
    else:
        base, zone = s, None

    dt = datetime.fromisoformat(base)
    if zone:
        try:
            dt = dt.astimezone(ZoneInfo(zone))
        except Exception:
            # Unknown/unavailable zone id: fall back to the parsed offset.
            pass
    return dt
|