channel-app 0.0.157a1-py3-none-any.whl → 0.0.157a2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- channel_app/logs/encoders.py +22 -0
- channel_app/logs/services.py +74 -22
- {channel_app-0.0.157a1.dist-info → channel_app-0.0.157a2.dist-info}/METADATA +1 -1
- {channel_app-0.0.157a1.dist-info → channel_app-0.0.157a2.dist-info}/RECORD +6 -5
- {channel_app-0.0.157a1.dist-info → channel_app-0.0.157a2.dist-info}/WHEEL +0 -0
- {channel_app-0.0.157a1.dist-info → channel_app-0.0.157a2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,22 @@
|
|
1
|
+
from datetime import datetime
|
2
|
+
import json
|
3
|
+
import uuid
|
4
|
+
|
5
|
+
|
6
|
+
class UUIDEncoder(json.JSONEncoder):
    """JSON encoder aware of UUID and datetime values.

    Serialization rules:
      * ``datetime``  -> ISO-8601 string (``obj.isoformat()``)
      * ``uuid.UUID`` -> canonical hyphenated string (``str(obj)``)
      * anything else -> delegated to ``json.JSONEncoder``, which raises
        ``TypeError`` for unsupported types.
    """

    def default(self, obj):
        # Guard-clause style: return as soon as a supported type matches.
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, uuid.UUID):
            return str(obj)
        # Unsupported type: base class raises the standard TypeError.
        return super().default(obj)
|
channel_app/logs/services.py
CHANGED
@@ -1,12 +1,20 @@
|
|
1
1
|
from contextlib import contextmanager
|
2
|
+
import json
|
3
|
+
import os
|
2
4
|
import traceback
|
3
5
|
from typing import Optional
|
4
6
|
import uuid
|
5
7
|
from datetime import datetime, timezone
|
8
|
+
import boto3
|
6
9
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
7
10
|
|
8
|
-
from channel_app.database.models import
|
11
|
+
from channel_app.database.models import (
|
12
|
+
LogFlowModel,
|
13
|
+
LogStepExceptionModel,
|
14
|
+
LogStepModel,
|
15
|
+
)
|
9
16
|
from channel_app.database.services import DatabaseService
|
17
|
+
from channel_app.logs.encoders import UUIDEncoder
|
10
18
|
from channel_app.logs.enums import LogFlowAuthor, LogStepStatus
|
11
19
|
|
12
20
|
|
@@ -17,23 +25,22 @@ class LogService:
|
|
17
25
|
self.flow = {}
|
18
26
|
self.steps = []
|
19
27
|
self.exceptions = []
|
20
|
-
|
28
|
+
|
21
29
|
self.db_engine = self.database_service.create_engine()
|
22
|
-
self.s3_client =
|
23
|
-
self.s3_bucket = None # TODO: Get s3 bucket from exported environment variables
|
30
|
+
self.s3_client = S3Client()
|
24
31
|
|
25
32
|
def create_flow(
|
26
|
-
self,
|
27
|
-
name: str,
|
28
|
-
transaction_id: str,
|
29
|
-
flow_author: LogFlowAuthor = LogFlowAuthor.system
|
33
|
+
self,
|
34
|
+
name: str,
|
35
|
+
transaction_id: str,
|
36
|
+
flow_author: LogFlowAuthor = LogFlowAuthor.system,
|
30
37
|
):
|
31
38
|
self.flow = {
|
32
39
|
"id": uuid.uuid4(),
|
33
40
|
"transaction_id": transaction_id or str(uuid.uuid4()),
|
34
41
|
"flow_name": name,
|
35
42
|
"flow_author": flow_author.value,
|
36
|
-
"started_at": datetime.now(timezone.utc)
|
43
|
+
"started_at": datetime.now(timezone.utc),
|
37
44
|
}
|
38
45
|
|
39
46
|
@contextmanager
|
@@ -46,14 +53,16 @@ class LogService:
|
|
46
53
|
except Exception as exc:
|
47
54
|
self.add_exception(exc)
|
48
55
|
for step in reversed(self.steps):
|
49
|
-
if
|
56
|
+
if (
|
57
|
+
step["step_name"] == name
|
58
|
+
and step.get("status") == LogStepStatus.in_progress.value
|
59
|
+
):
|
50
60
|
step["end_time"] = now
|
51
61
|
step["status"] = LogStepStatus.failure.value
|
52
62
|
step["error"] = str(exc)
|
53
63
|
break
|
54
64
|
raise
|
55
65
|
|
56
|
-
|
57
66
|
def _add_step(self, name, start=False, end=False, metadata=None):
|
58
67
|
now = datetime.now(timezone.utc)
|
59
68
|
if start:
|
@@ -68,10 +77,15 @@ class LogService:
|
|
68
77
|
)
|
69
78
|
elif end:
|
70
79
|
for step in reversed(self.steps):
|
71
|
-
if
|
80
|
+
if (
|
81
|
+
step["step_name"] == name
|
82
|
+
and step["status"] == LogStepStatus.in_progress.value
|
83
|
+
):
|
72
84
|
step["end_time"] = now
|
73
85
|
step["status"] = LogStepStatus.success.value
|
74
|
-
step["duration_ms"] = int(
|
86
|
+
step["duration_ms"] = int(
|
87
|
+
(now - step["start_time"]).total_seconds() * 1000
|
88
|
+
)
|
75
89
|
|
76
90
|
def add_exception(self, exc: Exception):
|
77
91
|
tb = traceback.format_exc()
|
@@ -79,7 +93,7 @@ class LogService:
|
|
79
93
|
"id": uuid.uuid4(),
|
80
94
|
"type": type(exc).__name__,
|
81
95
|
"message": str(exc),
|
82
|
-
"traceback": tb
|
96
|
+
"traceback": tb,
|
83
97
|
}
|
84
98
|
self.exceptions.append(exc_obj)
|
85
99
|
# If this flow has related step, update the step to FAILURE
|
@@ -97,8 +111,7 @@ class LogService:
|
|
97
111
|
}
|
98
112
|
s3_key = f"logs/{self.flow['flow_name']}/{self.flow['transaction_id']}.json"
|
99
113
|
|
100
|
-
self.
|
101
|
-
|
114
|
+
self.s3_client.upload_object(s3_key, full_log_content)
|
102
115
|
|
103
116
|
log_flow_object = LogFlowModel(
|
104
117
|
id=self.flow["id"],
|
@@ -107,7 +120,9 @@ class LogService:
|
|
107
120
|
flow_author=self.flow["flow_author"],
|
108
121
|
started_at=self.flow["started_at"],
|
109
122
|
ended_at=self.flow["ended_at"],
|
110
|
-
status=
|
123
|
+
status=(
|
124
|
+
self.steps[-1]["status"] if self.steps else LogStepStatus.failure.value
|
125
|
+
),
|
111
126
|
s3_key=s3_key,
|
112
127
|
)
|
113
128
|
|
@@ -141,10 +156,6 @@ class LogService:
|
|
141
156
|
|
142
157
|
self._save_to_db(log_flow_object, step_models, exception_models)
|
143
158
|
|
144
|
-
def _upload_to_s3(self, key: str, content: dict):
|
145
|
-
# TODO: Implement this.
|
146
|
-
pass
|
147
|
-
|
148
159
|
def _save_to_db(self, flow_obj, step_objs, exception_objs):
|
149
160
|
session = scoped_session(sessionmaker(bind=self.db_engine))
|
150
161
|
try:
|
@@ -157,4 +168,45 @@ class LogService:
|
|
157
168
|
session.rollback()
|
158
169
|
raise
|
159
170
|
finally:
|
160
|
-
session.close()
|
171
|
+
session.close()
|
172
|
+
|
173
|
+
|
174
|
+
class S3Client:
    """Thin wrapper around a boto3 S3 client for uploading JSON log payloads.

    AWS credentials, region, and the target bucket must be provided via
    environment variables; they are validated at construction time so a
    misconfigured process fails fast with an explicit error.
    """

    def __init__(self):
        # Fail fast with a clear message if the environment is incomplete.
        self._validate_credentials()
        self.client = boto3.client("s3")
        # _validate_credentials() guarantees LOGGING_S3_BUCKET is set, so no
        # fallback is needed here.  The previous
        # os.getenv("LOGGING_S3_BUCKET", "default-bucket-name") fallback was
        # dead code that masked the required-env contract.
        self.bucket = os.environ["LOGGING_S3_BUCKET"]

    def _validate_credentials(self):
        """Raise ValueError naming every required env var that is unset."""
        required_env_vars = {
            "AWS_ACCESS_KEY_ID": os.getenv("AWS_ACCESS_KEY_ID"),
            "AWS_SECRET_ACCESS_KEY": os.getenv("AWS_SECRET_ACCESS_KEY"),
            "AWS_REGION": os.getenv("AWS_REGION"),
            "LOGGING_S3_BUCKET": os.getenv("LOGGING_S3_BUCKET"),
        }

        # Only unset variables count as missing; empty strings pass through
        # unchanged (same semantics as the original implementation).
        missing_vars = [
            name for name, value in required_env_vars.items() if value is None
        ]

        if missing_vars:
            raise ValueError(
                f"S3 Client initialization failed: missing AWS credentials: {', '.join(missing_vars)}"
            )

    def set_bucket(self, bucket_name: str):
        """Override the target bucket; returns self for fluent chaining."""
        self.bucket = bucket_name
        return self

    def upload_object(self, key: str, content: dict):
        """Serialize *content* as pretty-printed JSON and upload it to S3.

        Uses UUIDEncoder so UUID/datetime values inside *content* serialize
        cleanly. Re-raises any serialization or boto3 error after printing
        a diagnostic.
        """
        try:
            body = json.dumps(content, indent=2, cls=UUIDEncoder).encode("utf-8")
            self.client.put_object(
                Bucket=self.bucket,
                Key=key,
                Body=body,
                ContentType="application/json",
            )
        except Exception as e:
            # NOTE(review): print is a weak error sink; consider the logging
            # module. Kept as-is to avoid changing observable behavior beyond
            # the targeted fix above.
            print(f"[S3 Upload Error] {e}")
            raise
|
@@ -36,8 +36,9 @@ channel_app/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
|
|
36
36
|
channel_app/database/models.py,sha256=cdjLcfJe-OYZzk1fl3JL6ght8jryRYLwMF2uV3srM-o,2314
|
37
37
|
channel_app/database/services.py,sha256=0zHLAcJAKRU6hKEaS9DmsX_2gIE29hh__DfHHx3JuSE,216
|
38
38
|
channel_app/logs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
39
|
+
channel_app/logs/encoders.py,sha256=6CVgtkV7DrjxGpNXCJgT9bn9B2Ep0lHgtm-0ES7A57I,703
|
39
40
|
channel_app/logs/enums.py,sha256=If6ZjwRTerbJypYI8WjdsleHR7FjlV-TP2nBppFVEc4,214
|
40
|
-
channel_app/logs/services.py,sha256=
|
41
|
+
channel_app/logs/services.py,sha256=O488OStOe9C-2AIU34bjHEBB3_XywgGlLEn77JJHUu4,7173
|
41
42
|
channel_app/omnitron/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
42
43
|
channel_app/omnitron/batch_request.py,sha256=S8IHtbI1RtVLbnOwtfXSmkrREGp8wUYW2E-eu5omwyY,3550
|
43
44
|
channel_app/omnitron/constants.py,sha256=WZR6k_k2zZfN7lfi1ZLv1PphsHIq_LiZgw6Nd6LduvE,2793
|
@@ -64,7 +65,7 @@ channel_app/omnitron/commands/tests/test_product_images.py,sha256=y6tmiJ00kd2GTq
|
|
64
65
|
channel_app/omnitron/commands/tests/test_product_prices.py,sha256=5HPX9PmjGw6gk3oNrwtWLqdrOkfeNx1mYP-pYwOesZU,3496
|
65
66
|
channel_app/omnitron/commands/tests/test_product_stocks.py,sha256=q4RGlrCNUUJyN5CBL1fzrvdd4Q3xt816mbMRQT0XEd0,3496
|
66
67
|
channel_app/omnitron/commands/tests/test_products.py,sha256=uj5KLaubY3XNu0hidOH-u-Djfboe81Hj7-lP--01Le0,103494
|
67
|
-
channel_app-0.0.
|
68
|
-
channel_app-0.0.
|
69
|
-
channel_app-0.0.
|
70
|
-
channel_app-0.0.
|
68
|
+
channel_app-0.0.157a2.dist-info/METADATA,sha256=qyT2b6_Y3MwOreXTMrwfW3D113VS133PS5sAKy711OI,311
|
69
|
+
channel_app-0.0.157a2.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
|
70
|
+
channel_app-0.0.157a2.dist-info/top_level.txt,sha256=JT-gM6L5Cwxr1xEoN7NHrREDs-d6iGFGfRnK-NrJ3tU,12
|
71
|
+
channel_app-0.0.157a2.dist-info/RECORD,,
|
File without changes
|
File without changes
|