ddeutil-workflow 0.0.84__py3-none-any.whl → 0.0.85__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -4
- ddeutil/workflow/audits.py +8 -6
- ddeutil/workflow/conf.py +4 -17
- ddeutil/workflow/errors.py +31 -19
- ddeutil/workflow/job.py +276 -156
- ddeutil/workflow/plugins/providers/az.py +2 -2
- ddeutil/workflow/stages.py +404 -314
- ddeutil/workflow/traces.py +125 -185
- ddeutil/workflow/workflow.py +4 -1
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/METADATA +13 -16
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/RECORD +16 -16
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.84.dist-info → ddeutil_workflow-0.0.85.dist-info}/top_level.txt +0 -0
ddeutil/workflow/traces.py
CHANGED
@@ -56,7 +56,12 @@ EMJ_SKIP: str = "⏭️"
|
|
56
56
|
|
57
57
|
|
58
58
|
@lru_cache
|
59
|
-
def set_logging(
|
59
|
+
def set_logging(
|
60
|
+
name: str,
|
61
|
+
*,
|
62
|
+
message_fmt: Optional[str] = None,
|
63
|
+
datetime_fmt: Optional[str] = None,
|
64
|
+
) -> logging.Logger:
|
60
65
|
"""Configure logger with custom formatting and handlers.
|
61
66
|
|
62
67
|
Creates and configures a logger instance with the custom formatter and
|
@@ -64,7 +69,9 @@ def set_logging(name: str) -> logging.Logger:
|
|
64
69
|
console output and proper formatting for workflow execution tracking.
|
65
70
|
|
66
71
|
Args:
|
67
|
-
name:
|
72
|
+
name (str): A module name to create logger for.
|
73
|
+
message_fmt: (str, default None)
|
74
|
+
datetime_fmt: (str, default None)
|
68
75
|
|
69
76
|
Returns:
|
70
77
|
logging.Logger: Configured logger instance with custom formatting.
|
@@ -81,9 +88,9 @@ def set_logging(name: str) -> logging.Logger:
|
|
81
88
|
# `logging.getLogger('ddeutil.workflow').propagate = False`
|
82
89
|
#
|
83
90
|
_logger.addHandler(logging.NullHandler())
|
84
|
-
|
85
91
|
formatter = logging.Formatter(
|
86
|
-
fmt=
|
92
|
+
fmt=message_fmt,
|
93
|
+
datefmt=datetime_fmt,
|
87
94
|
)
|
88
95
|
stream_handler = logging.StreamHandler()
|
89
96
|
stream_handler.setFormatter(formatter)
|
@@ -92,6 +99,16 @@ def set_logging(name: str) -> logging.Logger:
|
|
92
99
|
return _logger
|
93
100
|
|
94
101
|
|
102
|
+
PrefixType = Literal[
|
103
|
+
"caller",
|
104
|
+
"nested",
|
105
|
+
"stage",
|
106
|
+
"job",
|
107
|
+
"workflow",
|
108
|
+
"release",
|
109
|
+
"schedule",
|
110
|
+
"audit",
|
111
|
+
]
|
95
112
|
PREFIX_LOGS: Final[dict[str, dict]] = {
|
96
113
|
"CALLER": {
|
97
114
|
"emoji": "⚙️",
|
@@ -102,7 +119,7 @@ PREFIX_LOGS: Final[dict[str, dict]] = {
|
|
102
119
|
"JOB": {"emoji": "🏗", "desc": "logs from job module."},
|
103
120
|
"WORKFLOW": {"emoji": "👟", "desc": "logs from workflow module."},
|
104
121
|
"RELEASE": {"emoji": "📅", "desc": "logs from release workflow method."},
|
105
|
-
"
|
122
|
+
"SCHEDULE": {"emoji": "⏰", "desc": "logs from poke workflow method."},
|
106
123
|
"AUDIT": {"emoji": "📌", "desc": "logs from audit model."},
|
107
124
|
} # pragma: no cov
|
108
125
|
PREFIX_DEFAULT: Final[str] = "CALLER"
|
@@ -157,6 +174,12 @@ class Message(BaseModel):
|
|
157
174
|
return f"{emoji}[{name}]: {self.message}"
|
158
175
|
|
159
176
|
|
177
|
+
class Metric(BaseModel): # pragma: no cov
|
178
|
+
"""Trace Metric model that will validate data from current logging."""
|
179
|
+
|
180
|
+
execution_time: float
|
181
|
+
|
182
|
+
|
160
183
|
class Metadata(BaseModel): # pragma: no cov
|
161
184
|
"""Trace Metadata model for making the current metadata of this CPU, Memory.
|
162
185
|
|
@@ -181,17 +204,6 @@ class Metadata(BaseModel): # pragma: no cov
|
|
181
204
|
filename: str = Field(description="A filename of this log.")
|
182
205
|
lineno: int = Field(description="A line number of this log.")
|
183
206
|
|
184
|
-
# Enhanced observability fields
|
185
|
-
workflow_name: Optional[str] = Field(
|
186
|
-
default=None, description="Name of the workflow being executed."
|
187
|
-
)
|
188
|
-
stage_name: Optional[str] = Field(
|
189
|
-
default=None, description="Name of the current stage being executed."
|
190
|
-
)
|
191
|
-
job_name: Optional[str] = Field(
|
192
|
-
default=None, description="Name of the current job being executed."
|
193
|
-
)
|
194
|
-
|
195
207
|
# Performance metrics
|
196
208
|
duration_ms: Optional[float] = Field(
|
197
209
|
default=None, description="Execution duration in milliseconds."
|
@@ -203,44 +215,6 @@ class Metadata(BaseModel): # pragma: no cov
|
|
203
215
|
default=None, description="CPU usage percentage at log time."
|
204
216
|
)
|
205
217
|
|
206
|
-
# Distributed tracing support
|
207
|
-
trace_id: Optional[str] = Field(
|
208
|
-
default=None,
|
209
|
-
description="OpenTelemetry trace ID for distributed tracing.",
|
210
|
-
)
|
211
|
-
span_id: Optional[str] = Field(
|
212
|
-
default=None,
|
213
|
-
description="OpenTelemetry span ID for distributed tracing.",
|
214
|
-
)
|
215
|
-
parent_span_id: Optional[str] = Field(
|
216
|
-
default=None, description="Parent span ID for correlation."
|
217
|
-
)
|
218
|
-
|
219
|
-
# Error context
|
220
|
-
exception_type: Optional[str] = Field(
|
221
|
-
default=None, description="Exception class name if error occurred."
|
222
|
-
)
|
223
|
-
exception_message: Optional[str] = Field(
|
224
|
-
default=None, description="Exception message if error occurred."
|
225
|
-
)
|
226
|
-
stack_trace: Optional[str] = Field(
|
227
|
-
default=None, description="Full stack trace if error occurred."
|
228
|
-
)
|
229
|
-
error_code: Optional[str] = Field(
|
230
|
-
default=None, description="Custom error code for categorization."
|
231
|
-
)
|
232
|
-
|
233
|
-
# Business context
|
234
|
-
user_id: Optional[str] = Field(
|
235
|
-
default=None, description="User ID who triggered the workflow."
|
236
|
-
)
|
237
|
-
tenant_id: Optional[str] = Field(
|
238
|
-
default=None, description="Tenant ID for multi-tenant environments."
|
239
|
-
)
|
240
|
-
environment: Optional[str] = Field(
|
241
|
-
default=None, description="Environment (dev, staging, prod)."
|
242
|
-
)
|
243
|
-
|
244
218
|
# NOTE: System context
|
245
219
|
hostname: Optional[str] = Field(
|
246
220
|
default=None, description="Hostname where workflow is running."
|
@@ -259,7 +233,7 @@ class Metadata(BaseModel): # pragma: no cov
|
|
259
233
|
tags: Optional[list[str]] = Field(
|
260
234
|
default_factory=list, description="Custom tags for categorization."
|
261
235
|
)
|
262
|
-
|
236
|
+
metric: Optional[DictData] = Field(
|
263
237
|
default_factory=dict, description="Additional custom metadata."
|
264
238
|
)
|
265
239
|
|
@@ -299,6 +273,7 @@ class Metadata(BaseModel): # pragma: no cov
|
|
299
273
|
run_id: str,
|
300
274
|
parent_run_id: Optional[str],
|
301
275
|
*,
|
276
|
+
metric: Optional[DictData] = None,
|
302
277
|
extras: Optional[DictData] = None,
|
303
278
|
) -> Self:
|
304
279
|
"""Make the current metric for contract this Metadata model instance.
|
@@ -313,6 +288,7 @@ class Metadata(BaseModel): # pragma: no cov
|
|
313
288
|
cutting_id: A cutting ID string.
|
314
289
|
run_id:
|
315
290
|
parent_run_id:
|
291
|
+
metric:
|
316
292
|
extras: An extra parameter that want to override core
|
317
293
|
config values.
|
318
294
|
|
@@ -363,27 +339,10 @@ class Metadata(BaseModel): # pragma: no cov
|
|
363
339
|
parent_run_id=parent_run_id,
|
364
340
|
filename=frame_info.filename.split(os.path.sep)[-1],
|
365
341
|
lineno=frame_info.lineno,
|
366
|
-
# NOTE: Enhanced observability fields
|
367
|
-
workflow_name=extras_data.get("workflow_name"),
|
368
|
-
stage_name=extras_data.get("stage_name"),
|
369
|
-
job_name=extras_data.get("job_name"),
|
370
342
|
# NOTE: Performance metrics
|
371
343
|
duration_ms=extras_data.get("duration_ms"),
|
372
344
|
memory_usage_mb=extras_data.get("memory_usage_mb"),
|
373
345
|
cpu_usage_percent=extras_data.get("cpu_usage_percent"),
|
374
|
-
# NOTE: Distributed tracing support
|
375
|
-
trace_id=extras_data.get("trace_id"),
|
376
|
-
span_id=extras_data.get("span_id"),
|
377
|
-
parent_span_id=extras_data.get("parent_span_id"),
|
378
|
-
# NOTE: Error context
|
379
|
-
exception_type=extras_data.get("exception_type"),
|
380
|
-
exception_message=extras_data.get("exception_message"),
|
381
|
-
stack_trace=extras_data.get("stack_trace"),
|
382
|
-
error_code=extras_data.get("error_code"),
|
383
|
-
# NOTE: Business context
|
384
|
-
user_id=extras_data.get("user_id"),
|
385
|
-
tenant_id=extras_data.get("tenant_id"),
|
386
|
-
environment=extras_data.get("environment"),
|
387
346
|
# NOTE: System context
|
388
347
|
hostname=hostname,
|
389
348
|
ip_address=ip_address,
|
@@ -391,11 +350,17 @@ class Metadata(BaseModel): # pragma: no cov
|
|
391
350
|
package_version=__version__,
|
392
351
|
# NOTE: Custom metadata
|
393
352
|
tags=extras_data.get("tags", []),
|
394
|
-
|
353
|
+
metric=metric,
|
395
354
|
)
|
396
355
|
|
397
356
|
@property
|
398
|
-
def pointer_id(self):
|
357
|
+
def pointer_id(self) -> str:
|
358
|
+
"""Pointer ID of trace metadata.
|
359
|
+
|
360
|
+
Returns:
|
361
|
+
str: A pointer ID that will choose from parent running ID or running
|
362
|
+
ID.
|
363
|
+
"""
|
399
364
|
return self.parent_run_id or self.run_id
|
400
365
|
|
401
366
|
|
@@ -449,6 +414,27 @@ class ConsoleHandler(BaseHandler):
|
|
449
414
|
"""Console Handler model."""
|
450
415
|
|
451
416
|
type: Literal["console"] = "console"
|
417
|
+
name: str = "ddeutil.workflow"
|
418
|
+
format: str = Field(
|
419
|
+
default=(
|
420
|
+
"%(asctime)s.%(msecs)03d (%(process)-5d, "
|
421
|
+
"%(thread)-5d) [%(levelname)-7s] (%(cut_id)s) %(message)-120s "
|
422
|
+
"(%(filename)s:%(lineno)s) (%(name)-10s)"
|
423
|
+
),
|
424
|
+
description="A log format that will use with logging package.",
|
425
|
+
)
|
426
|
+
datetime_format: str = Field(
|
427
|
+
default="%Y-%m-%d %H:%M:%S",
|
428
|
+
description="A log datetime format.",
|
429
|
+
)
|
430
|
+
|
431
|
+
def pre(self) -> None:
|
432
|
+
"""Pre-process."""
|
433
|
+
set_logging(
|
434
|
+
self.name,
|
435
|
+
message_fmt=self.format,
|
436
|
+
datetime_fmt=self.datetime_format,
|
437
|
+
)
|
452
438
|
|
453
439
|
def emit(
|
454
440
|
self, metadata: Metadata, *, extra: Optional[DictData] = None
|
@@ -512,6 +498,9 @@ class FileHandler(BaseHandler):
|
|
512
498
|
return log_file
|
513
499
|
|
514
500
|
def pre(self) -> None: # pragma: no cov
|
501
|
+
"""Pre-method that will call from getting trace model factory function.
|
502
|
+
This method will create filepath of this parent log.
|
503
|
+
"""
|
515
504
|
if not (p := Path(self.path)).exists():
|
516
505
|
p.mkdir(parents=True)
|
517
506
|
|
@@ -521,7 +510,12 @@ class FileHandler(BaseHandler):
|
|
521
510
|
*,
|
522
511
|
extra: Optional[DictData] = None,
|
523
512
|
) -> None:
|
524
|
-
"""Emit trace log.
|
513
|
+
"""Emit trace log to the file with a specific pointer path.
|
514
|
+
|
515
|
+
Args:
|
516
|
+
metadata (Metadata):
|
517
|
+
extra (DictData, default None):
|
518
|
+
"""
|
525
519
|
pointer: Path = self.pointer(metadata.pointer_id)
|
526
520
|
std_file = "stderr" if metadata.error_flag else "stdout"
|
527
521
|
with self._lock:
|
@@ -541,6 +535,7 @@ class FileHandler(BaseHandler):
|
|
541
535
|
*,
|
542
536
|
extra: Optional[DictData] = None,
|
543
537
|
) -> None: # pragma: no cov
|
538
|
+
"""Async emit trace log."""
|
544
539
|
try:
|
545
540
|
import aiofiles
|
546
541
|
except ImportError as e:
|
@@ -717,22 +712,9 @@ class SQLiteHandler(BaseHandler): # pragma: no cov
|
|
717
712
|
filename TEXT NOT NULL,
|
718
713
|
lineno INTEGER NOT NULL,
|
719
714
|
cut_id TEXT,
|
720
|
-
workflow_name TEXT,
|
721
|
-
stage_name TEXT,
|
722
|
-
job_name TEXT,
|
723
715
|
duration_ms REAL,
|
724
716
|
memory_usage_mb REAL,
|
725
717
|
cpu_usage_percent REAL,
|
726
|
-
trace_id TEXT,
|
727
|
-
span_id TEXT,
|
728
|
-
parent_span_id TEXT,
|
729
|
-
exception_type TEXT,
|
730
|
-
exception_message TEXT,
|
731
|
-
stack_trace TEXT,
|
732
|
-
error_code TEXT,
|
733
|
-
user_id TEXT,
|
734
|
-
tenant_id TEXT,
|
735
|
-
environment TEXT,
|
736
718
|
hostname TEXT,
|
737
719
|
ip_address TEXT,
|
738
720
|
python_version TEXT,
|
@@ -938,28 +920,15 @@ class SQLiteHandler(BaseHandler): # pragma: no cov
|
|
938
920
|
cut_id=record[11],
|
939
921
|
filename=record[9],
|
940
922
|
lineno=record[10],
|
941
|
-
workflow_name=record[12],
|
942
|
-
stage_name=record[13],
|
943
|
-
job_name=record[14],
|
944
923
|
duration_ms=record[15],
|
945
924
|
memory_usage_mb=record[16],
|
946
925
|
cpu_usage_percent=record[17],
|
947
|
-
trace_id=record[18],
|
948
|
-
span_id=record[19],
|
949
|
-
parent_span_id=record[20],
|
950
|
-
exception_type=record[21],
|
951
|
-
exception_message=record[22],
|
952
|
-
stack_trace=record[23],
|
953
|
-
error_code=record[24],
|
954
|
-
user_id=record[25],
|
955
|
-
tenant_id=record[26],
|
956
|
-
environment=record[27],
|
957
926
|
hostname=record[28],
|
958
927
|
ip_address=record[29],
|
959
928
|
python_version=record[30],
|
960
929
|
package_version=record[31],
|
961
930
|
tags=json.loads(record[32]) if record[32] else [],
|
962
|
-
|
931
|
+
metric=(
|
963
932
|
json.loads(record[33]) if record[33] else {}
|
964
933
|
),
|
965
934
|
)
|
@@ -1045,28 +1014,15 @@ class SQLiteHandler(BaseHandler): # pragma: no cov
|
|
1045
1014
|
cut_id=record[11],
|
1046
1015
|
filename=record[9],
|
1047
1016
|
lineno=record[10],
|
1048
|
-
workflow_name=record[12],
|
1049
|
-
stage_name=record[13],
|
1050
|
-
job_name=record[14],
|
1051
1017
|
duration_ms=record[15],
|
1052
1018
|
memory_usage_mb=record[16],
|
1053
1019
|
cpu_usage_percent=record[17],
|
1054
|
-
trace_id=record[18],
|
1055
|
-
span_id=record[19],
|
1056
|
-
parent_span_id=record[20],
|
1057
|
-
exception_type=record[21],
|
1058
|
-
exception_message=record[22],
|
1059
|
-
stack_trace=record[23],
|
1060
|
-
error_code=record[24],
|
1061
|
-
user_id=record[25],
|
1062
|
-
tenant_id=record[26],
|
1063
|
-
environment=record[27],
|
1064
1020
|
hostname=record[28],
|
1065
1021
|
ip_address=record[29],
|
1066
1022
|
python_version=record[30],
|
1067
1023
|
package_version=record[31],
|
1068
1024
|
tags=json.loads(record[32]) if record[32] else [],
|
1069
|
-
|
1025
|
+
metric=json.loads(record[33]) if record[33] else {},
|
1070
1026
|
)
|
1071
1027
|
|
1072
1028
|
meta_list.append(trace_meta)
|
@@ -1394,22 +1350,9 @@ class ElasticHandler(BaseHandler): # pragma: no cov
|
|
1394
1350
|
"filename": {"type": "keyword"},
|
1395
1351
|
"lineno": {"type": "integer"},
|
1396
1352
|
"cut_id": {"type": "keyword"},
|
1397
|
-
"workflow_name": {"type": "keyword"},
|
1398
|
-
"stage_name": {"type": "keyword"},
|
1399
|
-
"job_name": {"type": "keyword"},
|
1400
1353
|
"duration_ms": {"type": "float"},
|
1401
1354
|
"memory_usage_mb": {"type": "float"},
|
1402
1355
|
"cpu_usage_percent": {"type": "float"},
|
1403
|
-
"trace_id": {"type": "keyword"},
|
1404
|
-
"span_id": {"type": "keyword"},
|
1405
|
-
"parent_span_id": {"type": "keyword"},
|
1406
|
-
"exception_type": {"type": "keyword"},
|
1407
|
-
"exception_message": {"type": "text"},
|
1408
|
-
"stack_trace": {"type": "text"},
|
1409
|
-
"error_code": {"type": "keyword"},
|
1410
|
-
"user_id": {"type": "keyword"},
|
1411
|
-
"tenant_id": {"type": "keyword"},
|
1412
|
-
"environment": {"type": "keyword"},
|
1413
1356
|
"hostname": {"type": "keyword"},
|
1414
1357
|
"ip_address": {"type": "ip"},
|
1415
1358
|
"python_version": {"type": "keyword"},
|
@@ -1453,22 +1396,9 @@ class ElasticHandler(BaseHandler): # pragma: no cov
|
|
1453
1396
|
"filename": base_data["filename"],
|
1454
1397
|
"lineno": base_data["lineno"],
|
1455
1398
|
"cut_id": base_data["cut_id"],
|
1456
|
-
"workflow_name": base_data.get("workflow_name"),
|
1457
|
-
"stage_name": base_data.get("stage_name"),
|
1458
|
-
"job_name": base_data.get("job_name"),
|
1459
1399
|
"duration_ms": base_data.get("duration_ms"),
|
1460
1400
|
"memory_usage_mb": base_data.get("memory_usage_mb"),
|
1461
1401
|
"cpu_usage_percent": base_data.get("cpu_usage_percent"),
|
1462
|
-
"trace_id": base_data.get("trace_id"),
|
1463
|
-
"span_id": base_data.get("span_id"),
|
1464
|
-
"parent_span_id": base_data.get("parent_span_id"),
|
1465
|
-
"exception_type": base_data.get("exception_type"),
|
1466
|
-
"exception_message": base_data.get("exception_message"),
|
1467
|
-
"stack_trace": base_data.get("stack_trace"),
|
1468
|
-
"error_code": base_data.get("error_code"),
|
1469
|
-
"user_id": base_data.get("user_id"),
|
1470
|
-
"tenant_id": base_data.get("tenant_id"),
|
1471
|
-
"environment": base_data.get("environment"),
|
1472
1402
|
"hostname": base_data.get("hostname"),
|
1473
1403
|
"ip_address": base_data.get("ip_address"),
|
1474
1404
|
"python_version": base_data.get("python_version"),
|
@@ -1587,28 +1517,15 @@ class ElasticHandler(BaseHandler): # pragma: no cov
|
|
1587
1517
|
cut_id=source.get("cut_id"),
|
1588
1518
|
filename=source["filename"],
|
1589
1519
|
lineno=source["lineno"],
|
1590
|
-
workflow_name=source.get("workflow_name"),
|
1591
|
-
stage_name=source.get("stage_name"),
|
1592
|
-
job_name=source.get("job_name"),
|
1593
1520
|
duration_ms=source.get("duration_ms"),
|
1594
1521
|
memory_usage_mb=source.get("memory_usage_mb"),
|
1595
1522
|
cpu_usage_percent=source.get("cpu_usage_percent"),
|
1596
|
-
trace_id=source.get("trace_id"),
|
1597
|
-
span_id=source.get("span_id"),
|
1598
|
-
parent_span_id=source.get("parent_span_id"),
|
1599
|
-
exception_type=source.get("exception_type"),
|
1600
|
-
exception_message=source.get("exception_message"),
|
1601
|
-
stack_trace=source.get("stack_trace"),
|
1602
|
-
error_code=source.get("error_code"),
|
1603
|
-
user_id=source.get("user_id"),
|
1604
|
-
tenant_id=source.get("tenant_id"),
|
1605
|
-
environment=source.get("environment"),
|
1606
1523
|
hostname=source.get("hostname"),
|
1607
1524
|
ip_address=source.get("ip_address"),
|
1608
1525
|
python_version=source.get("python_version"),
|
1609
1526
|
package_version=source.get("package_version"),
|
1610
1527
|
tags=source.get("tags", []),
|
1611
|
-
|
1528
|
+
metric=source.get("metric", {}),
|
1612
1529
|
)
|
1613
1530
|
|
1614
1531
|
meta_list.append(trace_meta)
|
@@ -1697,28 +1614,15 @@ class ElasticHandler(BaseHandler): # pragma: no cov
|
|
1697
1614
|
cut_id=source.get("cut_id"),
|
1698
1615
|
filename=source["filename"],
|
1699
1616
|
lineno=source["lineno"],
|
1700
|
-
workflow_name=source.get("workflow_name"),
|
1701
|
-
stage_name=source.get("stage_name"),
|
1702
|
-
job_name=source.get("job_name"),
|
1703
1617
|
duration_ms=source.get("duration_ms"),
|
1704
1618
|
memory_usage_mb=source.get("memory_usage_mb"),
|
1705
1619
|
cpu_usage_percent=source.get("cpu_usage_percent"),
|
1706
|
-
trace_id=source.get("trace_id"),
|
1707
|
-
span_id=source.get("span_id"),
|
1708
|
-
parent_span_id=source.get("parent_span_id"),
|
1709
|
-
exception_type=source.get("exception_type"),
|
1710
|
-
exception_message=source.get("exception_message"),
|
1711
|
-
stack_trace=source.get("stack_trace"),
|
1712
|
-
error_code=source.get("error_code"),
|
1713
|
-
user_id=source.get("user_id"),
|
1714
|
-
tenant_id=source.get("tenant_id"),
|
1715
|
-
environment=source.get("environment"),
|
1716
1620
|
hostname=source.get("hostname"),
|
1717
1621
|
ip_address=source.get("ip_address"),
|
1718
1622
|
python_version=source.get("python_version"),
|
1719
1623
|
package_version=source.get("package_version"),
|
1720
1624
|
tags=source.get("tags", []),
|
1721
|
-
|
1625
|
+
metric=source.get("metric", {}),
|
1722
1626
|
)
|
1723
1627
|
|
1724
1628
|
meta_list.append(trace_meta)
|
@@ -1774,26 +1678,35 @@ class BaseEmit(ABC):
|
|
1774
1678
|
self,
|
1775
1679
|
msg: str,
|
1776
1680
|
level: Level,
|
1777
|
-
|
1681
|
+
*,
|
1682
|
+
module: Optional[str] = None,
|
1683
|
+
metric: Optional[DictData] = None,
|
1684
|
+
) -> None:
|
1778
1685
|
"""Write trace log with append mode and logging this message with any
|
1779
1686
|
logging level.
|
1780
1687
|
|
1781
1688
|
Args:
|
1782
1689
|
msg: A message that want to log.
|
1783
1690
|
level: A logging level.
|
1691
|
+
metric (DictData, default None): A metric data that want to export
|
1692
|
+
to each target handler.
|
1693
|
+
module (str, default None): A module name that use for adding prefix
|
1694
|
+
at the message value.
|
1784
1695
|
"""
|
1785
1696
|
raise NotImplementedError(
|
1786
|
-
"
|
1697
|
+
"Emit action should be implement for making trace log."
|
1787
1698
|
)
|
1788
1699
|
|
1789
|
-
def debug(self, msg: str):
|
1700
|
+
def debug(self, msg: str, module: Optional[str] = None):
|
1790
1701
|
"""Write trace log with append mode and logging this message with the
|
1791
1702
|
DEBUG level.
|
1792
1703
|
|
1793
1704
|
Args:
|
1794
1705
|
msg: A message that want to log.
|
1706
|
+
module (str, default None): A module name that use for adding prefix
|
1707
|
+
at the message value.
|
1795
1708
|
"""
|
1796
|
-
self.emit(msg, level="debug")
|
1709
|
+
self.emit(msg, level="debug", module=module)
|
1797
1710
|
|
1798
1711
|
def info(self, msg: str) -> None:
|
1799
1712
|
"""Write trace log with append mode and logging this message with the
|
@@ -1839,6 +1752,8 @@ class BaseAsyncEmit(ABC):
|
|
1839
1752
|
self,
|
1840
1753
|
msg: str,
|
1841
1754
|
level: Level,
|
1755
|
+
*,
|
1756
|
+
metric: Optional[DictData] = None,
|
1842
1757
|
) -> None:
|
1843
1758
|
"""Async write trace log with append mode and logging this message with
|
1844
1759
|
any logging level.
|
@@ -1846,6 +1761,8 @@ class BaseAsyncEmit(ABC):
|
|
1846
1761
|
Args:
|
1847
1762
|
msg (str): A message that want to log.
|
1848
1763
|
level (Level): A logging level.
|
1764
|
+
metric (DictData, default None): A metric data that want to export
|
1765
|
+
to each target handler.
|
1849
1766
|
"""
|
1850
1767
|
raise NotImplementedError(
|
1851
1768
|
"Async Logging action should be implement for making trace log."
|
@@ -1951,12 +1868,23 @@ class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
|
|
1951
1868
|
"""
|
1952
1869
|
return prepare_newline(Message.from_str(msg).prepare(self.extras))
|
1953
1870
|
|
1954
|
-
def emit(
|
1871
|
+
def emit(
|
1872
|
+
self,
|
1873
|
+
msg: str,
|
1874
|
+
level: Level,
|
1875
|
+
*,
|
1876
|
+
module: Optional[str] = None,
|
1877
|
+
metric: Optional[DictData] = None,
|
1878
|
+
) -> None:
|
1955
1879
|
"""Emit a trace log to all handler. This will use synchronise process.
|
1956
1880
|
|
1957
1881
|
Args:
|
1958
1882
|
msg (str): A message.
|
1959
1883
|
level (Level): A tracing level.
|
1884
|
+
metric (DictData, default None): A metric data that want to export
|
1885
|
+
to each target handler.
|
1886
|
+
module (str, default None): A module name that use for adding prefix
|
1887
|
+
at the message value.
|
1960
1888
|
"""
|
1961
1889
|
_msg: str = self.make_message(msg)
|
1962
1890
|
metadata: Metadata = Metadata.make(
|
@@ -1966,9 +1894,14 @@ class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
|
|
1966
1894
|
cutting_id=self.cut_id,
|
1967
1895
|
run_id=self.run_id,
|
1968
1896
|
parent_run_id=self.parent_run_id,
|
1897
|
+
metric=metric,
|
1969
1898
|
extras=self.extras,
|
1970
1899
|
)
|
1900
|
+
|
1901
|
+
# NOTE: Check enable buffer flag was set or not.
|
1971
1902
|
if not self._enable_buffer:
|
1903
|
+
|
1904
|
+
# NOTE: Start emit tracing log data to each handler.
|
1972
1905
|
for handler in self.handlers:
|
1973
1906
|
handler.emit(metadata, extra=self.extras)
|
1974
1907
|
return
|
@@ -1981,13 +1914,17 @@ class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
|
|
1981
1914
|
handler.flush(self._buffer, extra=self.extras)
|
1982
1915
|
self._buffer.clear()
|
1983
1916
|
|
1984
|
-
async def amit(
|
1917
|
+
async def amit(
|
1918
|
+
self, msg: str, level: Level, *, metric: Optional[DictData] = None
|
1919
|
+
) -> None:
|
1985
1920
|
"""Async write trace log with append mode and logging this message with
|
1986
1921
|
any logging level.
|
1987
1922
|
|
1988
1923
|
Args:
|
1989
1924
|
msg (str): A message that want to log.
|
1990
1925
|
level (Level): A logging mode.
|
1926
|
+
metric (DictData, default None): A metric data that want to export
|
1927
|
+
to each target handler.
|
1991
1928
|
"""
|
1992
1929
|
_msg: str = self.make_message(msg)
|
1993
1930
|
metadata: Metadata = Metadata.make(
|
@@ -1997,8 +1934,11 @@ class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
|
|
1997
1934
|
cutting_id=self.cut_id,
|
1998
1935
|
run_id=self.run_id,
|
1999
1936
|
parent_run_id=self.parent_run_id,
|
1937
|
+
metric=metric,
|
2000
1938
|
extras=self.extras,
|
2001
1939
|
)
|
1940
|
+
|
1941
|
+
# NOTE: Start emit tracing log data to each handler.
|
2002
1942
|
for handler in self.handlers:
|
2003
1943
|
await handler.amit(metadata, extra=self.extras)
|
2004
1944
|
|
@@ -2039,7 +1979,7 @@ def get_trace(
|
|
2039
1979
|
handlers: list[Union[DictData, Handler]] = None,
|
2040
1980
|
parent_run_id: Optional[str] = None,
|
2041
1981
|
extras: Optional[DictData] = None,
|
2042
|
-
auto_pre_process: bool =
|
1982
|
+
auto_pre_process: bool = True,
|
2043
1983
|
) -> Trace:
|
2044
1984
|
"""Get dynamic Trace instance from the core config.
|
2045
1985
|
|
@@ -2050,22 +1990,22 @@ def get_trace(
|
|
2050
1990
|
Args:
|
2051
1991
|
run_id (str): A running ID.
|
2052
1992
|
parent_run_id (str | None, default None): A parent running ID.
|
2053
|
-
handlers (list):
|
2054
|
-
|
2055
|
-
config
|
2056
|
-
|
1993
|
+
handlers (list[DictData | Handler], default None): A list of handler or
|
1994
|
+
mapping of handler data that want to direct pass instead use
|
1995
|
+
environment variable config.
|
1996
|
+
extras (DictData, default None): An extra parameter that want to
|
1997
|
+
override the core config values.
|
1998
|
+
auto_pre_process (bool, default True): A flag that will auto call pre
|
1999
|
+
method after validate a trace model.
|
2057
2000
|
|
2058
2001
|
Returns:
|
2059
2002
|
Trace: The appropriate trace instance.
|
2060
2003
|
"""
|
2061
|
-
handlers: list[DictData] = dynamic(
|
2062
|
-
"trace_handlers", f=handlers, extras=extras
|
2063
|
-
)
|
2064
2004
|
trace: Trace = Trace.model_validate(
|
2065
2005
|
{
|
2066
2006
|
"run_id": run_id,
|
2067
2007
|
"parent_run_id": parent_run_id,
|
2068
|
-
"handlers": handlers,
|
2008
|
+
"handlers": dynamic("trace_handlers", f=handlers, extras=extras),
|
2069
2009
|
"extras": extras or {},
|
2070
2010
|
}
|
2071
2011
|
)
|
ddeutil/workflow/workflow.py
CHANGED
@@ -99,7 +99,10 @@ class Workflow(BaseModel):
|
|
99
99
|
description="An extra parameters that want to override config values.",
|
100
100
|
)
|
101
101
|
name: str = Field(description="A workflow name.")
|
102
|
-
type: Literal["Workflow"] = Field(
|
102
|
+
type: Literal["Workflow"] = Field(
|
103
|
+
default="Workflow",
|
104
|
+
description="A type of this config data that will use by discriminator",
|
105
|
+
)
|
103
106
|
desc: Optional[str] = Field(
|
104
107
|
default=None,
|
105
108
|
description=(
|