matrice-analytics 0.1.3__py3-none-any.whl → 0.1.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of matrice-analytics might be problematic.
- matrice_analytics/post_processing/advanced_tracker/matching.py +3 -3
- matrice_analytics/post_processing/advanced_tracker/strack.py +1 -1
- matrice_analytics/post_processing/config.py +4 -0
- matrice_analytics/post_processing/core/config.py +115 -12
- matrice_analytics/post_processing/face_reg/compare_similarity.py +5 -5
- matrice_analytics/post_processing/face_reg/embedding_manager.py +109 -8
- matrice_analytics/post_processing/face_reg/face_recognition.py +157 -61
- matrice_analytics/post_processing/face_reg/face_recognition_client.py +339 -88
- matrice_analytics/post_processing/face_reg/people_activity_logging.py +67 -29
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/__init__.py +9 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/__init__.py +4 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/cli.py +33 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/dataset_stats.py +139 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/export.py +398 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/train.py +447 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/utils.py +129 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/valid.py +93 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/validate_dataset.py +240 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/visualize_augmentation.py +176 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/visualize_predictions.py +96 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/core/__init__.py +3 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/core/process.py +246 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/core/types.py +60 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/core/utils.py +87 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/inference/__init__.py +3 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/inference/config.py +82 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/inference/hub.py +141 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/inference/plate_recognizer.py +323 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/py.typed +0 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/__init__.py +0 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/data/__init__.py +0 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/data/augmentation.py +101 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/data/dataset.py +97 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/__init__.py +0 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/config.py +114 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/layers.py +553 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/loss.py +55 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/metric.py +86 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/model_builders.py +95 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/model/model_schema.py +395 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/utilities/__init__.py +0 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/utilities/backend_utils.py +38 -0
- matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/train/utilities/utils.py +214 -0
- matrice_analytics/post_processing/ocr/postprocessing.py +0 -1
- matrice_analytics/post_processing/post_processor.py +32 -11
- matrice_analytics/post_processing/usecases/color/clip.py +42 -8
- matrice_analytics/post_processing/usecases/color/color_mapper.py +2 -2
- matrice_analytics/post_processing/usecases/color_detection.py +50 -129
- matrice_analytics/post_processing/usecases/drone_traffic_monitoring.py +41 -386
- matrice_analytics/post_processing/usecases/flare_analysis.py +1 -56
- matrice_analytics/post_processing/usecases/license_plate_detection.py +476 -202
- matrice_analytics/post_processing/usecases/license_plate_monitoring.py +351 -26
- matrice_analytics/post_processing/usecases/people_counting.py +408 -1431
- matrice_analytics/post_processing/usecases/people_counting_bckp.py +1683 -0
- matrice_analytics/post_processing/usecases/vehicle_monitoring.py +39 -10
- matrice_analytics/post_processing/utils/__init__.py +8 -8
- {matrice_analytics-0.1.3.dist-info → matrice_analytics-0.1.32.dist-info}/METADATA +1 -1
- {matrice_analytics-0.1.3.dist-info → matrice_analytics-0.1.32.dist-info}/RECORD +61 -26
- {matrice_analytics-0.1.3.dist-info → matrice_analytics-0.1.32.dist-info}/WHEEL +0 -0
- {matrice_analytics-0.1.3.dist-info → matrice_analytics-0.1.32.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_analytics-0.1.3.dist-info → matrice_analytics-0.1.32.dist-info}/top_level.txt +0 -0
matrice_analytics/post_processing/face_reg/people_activity_logging.py

@@ -2,6 +2,8 @@ import asyncio
 import logging
 import time
 import threading
+import queue
+import base64
 from typing import Dict, Optional, Set
 import numpy as np
 import cv2
@@ -16,8 +18,8 @@ class PeopleActivityLogging:
         self.face_client = face_client
         self.logger = logging.getLogger(__name__)

-        #
-        self.activity_queue =
+        # Use thread-safe queue for cross-thread communication (Python 3.8 compatibility)
+        self.activity_queue = queue.Queue()

         # Thread for background processing
         self.processing_thread = None
@@ -75,14 +77,12 @@ class PeopleActivityLogging:
         """Process activity queue continuously"""
         while self.is_running:
             try:
-                # Process queued detections with timeout
+                # Process queued detections with timeout using thread-safe queue
                 try:
-                    activity_data =
-                        self.activity_queue.get(), timeout=20
-                    )
+                    activity_data = self.activity_queue.get(timeout=20)
                     await self._process_activity(activity_data)
                     self.activity_queue.task_done()
-                except
+                except queue.Empty:
                     # Continue loop to check for empty detections
                     continue

@@ -135,7 +135,8 @@ class PeopleActivityLogging:
             self.last_detection_time = time.time()
             self.empty_detection_logged = False

-            await
+            # Use thread-safe put (no await needed for queue.Queue)
+            self.activity_queue.put(activity_data)
         except Exception as e:
             self.logger.error(f"Error enqueueing detection: {e}", exc_info=True)

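The change running through the three hunks above swaps an awaitable queue for the standard library's thread-safe queue.Queue: the enqueue path calls put() with no await, and the background loop polls with a blocking get(timeout=...) and treats queue.Empty as "nothing yet, keep looping". A minimal, self-contained sketch of that producer/consumer shape, independent of this package's classes (the worker, payload fields, and timeouts here are illustrative):

```python
import queue
import threading
import time

activity_queue = queue.Queue()

def producer() -> None:
    # Enqueue a few fake detections; put() is thread-safe and needs no await.
    for i in range(3):
        activity_queue.put({"employee_id": f"emp-{i}", "ts": time.time()})
        time.sleep(0.1)

def consumer(stop: threading.Event) -> None:
    while not stop.is_set():
        try:
            # Block briefly, then loop again so the stop flag is re-checked.
            item = activity_queue.get(timeout=0.5)
        except queue.Empty:
            continue
        print("processing", item["employee_id"])
        activity_queue.task_done()

stop = threading.Event()
worker = threading.Thread(target=consumer, args=(stop,), daemon=True)
worker.start()
producer()
activity_queue.join()  # wait until every queued item has been marked done
stop.set()
worker.join()
```

The timeout on get() is what keeps the worker responsive to shutdown: without it, a blocking get() could hang forever on an empty queue.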
@@ -169,7 +170,7 @@ class PeopleActivityLogging:
         return True

     async def _process_activity(self, activity_data: Dict):
-        """Process activity data - handle all face detections
+        """Process activity data - handle all face detections with embedded image data"""
         detection_type = activity_data["detection_type"]
         current_frame = activity_data["current_frame"]
         bbox = activity_data["bbox"]
@@ -180,46 +181,83 @@ class PeopleActivityLogging:

         try:
             if not self.face_client:
+                self.logger.warning("Face client not available for activity logging")
                 return

             # Check if we should log this detection (avoid duplicates within time window)
             if not self._should_log_detection(employee_id):
+                self.logger.debug(f"Skipping activity log for employee_id={employee_id} (within cooldown period)")
                 return None

-            #
-
-
+            # Encode frame as base64 JPEG
+            image_data = None
+            if current_frame is not None:
+                try:
+                    self.logger.debug(f"Encoding frame as base64 JPEG - employee_id={employee_id}")
+                    _, buffer = cv2.imencode(".jpg", current_frame)
+                    frame_bytes = buffer.tobytes()
+                    image_data = base64.b64encode(frame_bytes).decode('utf-8')
+                    self.logger.debug(f"Encoded image data - employee_id={employee_id}, size={len(frame_bytes)} bytes")
+                except Exception as e:
+                    self.logger.error(f"Error encoding frame for employee_id={employee_id}: {e}", exc_info=True)
+
+            # Store activity data with embedded image
+            self.logger.info(f"Processing activity log - type={detection_type}, employee_id={employee_id}, staff_id={staff_id}, location={location}")
+            response = await self.face_client.store_people_activity(
                 staff_id=staff_id,
                 detection_type=detection_type,
                 bbox=bbox,
                 location=location,
                 employee_id=employee_id,
                 timestamp=timestamp,
+                image_data=image_data,
             )

-            if
-            self.logger.
-            await self._upload_frame(current_frame, upload_url, employee_id)
+            if response and response.get("success", False):
+                self.logger.info(f"Activity log stored successfully for employee_id={employee_id}")
             else:
-
+                error_msg = response.get("error", "Unknown error") if response else "No response"
+                self.logger.warning(f"Failed to store activity log for employee_id={employee_id} - {error_msg}")

-            return
+            return response
         except Exception as e:
-            self.logger.error(f"Error
-
+            self.logger.error(f"Error processing activity log for employee_id={employee_id}: {e}", exc_info=True)
+
+    # async def _upload_frame_to_url(self, current_frame: np.ndarray, upload_url: str, employee_id: str):
+    #     try:
+    #         self.logger.debug(f"Encoding frame for upload - employee_id={employee_id}")
+    #         _, buffer = cv2.imencode(".jpg", current_frame)
+    #         frame_bytes = buffer.tobytes()
+
+    #         self.logger.info(f"Uploading frame to storage - employee_id={employee_id}, size={len(frame_bytes)} bytes")
+    #         upload_success = await self.face_client.upload_image_to_url(
+    #             frame_bytes, upload_url
+    #         )
+
+    #         if upload_success:
+    #             self.logger.info(f"Frame uploaded successfully for employee_id={employee_id}")
+    #         else:
+    #             self.logger.warning(f"Failed to upload frame for employee_id={employee_id}")
+    #     except Exception as e:
+    #         self.logger.error(f"Error uploading frame for employee_id={employee_id}: {e}", exc_info=True)

     async def _upload_frame(self, current_frame: np.ndarray, upload_url: str, employee_id: str):
-
-
-
-
-
-
+        try:
+            self.logger.debug(f"Encoding frame for upload - employee_id={employee_id}")
+            _, buffer = cv2.imencode(".jpg", current_frame)
+            frame_bytes = buffer.tobytes()
+
+            self.logger.info(f"Uploading frame to storage - employee_id={employee_id}, size={len(frame_bytes)} bytes")
+            upload_success = await self.face_client.upload_image_to_url(
+                frame_bytes, upload_url
+            )

-
-
-
-
+            if upload_success:
+                self.logger.info(f"Frame uploaded successfully for employee_id={employee_id}")
+            else:
+                self.logger.warning(f"Failed to upload frame for employee_id={employee_id}")
+        except Exception as e:
+            self.logger.error(f"Error uploading frame for employee_id={employee_id}: {e}", exc_info=True)

     async def _should_log_activity(self, activity_data: Dict) -> bool:
         """Check if activity should be logged"""
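The updated _process_activity embeds the frame directly in the activity record as a base64-encoded JPEG instead of uploading it to a separate URL. A small round-trip sketch of that encoding; the synthetic frame and the decode step are illustrative, and only OpenCV, NumPy, and the standard base64 module are assumed:

```python
import base64

import cv2
import numpy as np

# Synthetic BGR frame standing in for the captured camera frame.
frame = np.zeros((480, 640, 3), dtype=np.uint8)

# Encode to JPEG bytes, then to a base64 string suitable for a JSON payload.
ok, buffer = cv2.imencode(".jpg", frame)
assert ok, "JPEG encoding failed"
image_data = base64.b64encode(buffer.tobytes()).decode("utf-8")
print(f"payload size: {len(image_data)} chars")

# Receiving side: reverse the steps to recover the frame.
decoded = cv2.imdecode(
    np.frombuffer(base64.b64decode(image_data), dtype=np.uint8), cv2.IMREAD_COLOR
)
assert decoded.shape == frame.shape
```

Embedding the image trades larger request payloads for one fewer network round trip, which matches the old upload helper being commented out in this hunk.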
matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/cli.py (new file)

@@ -0,0 +1,33 @@
+"""
+Main CLI used when training a FastPlateOCR model.
+"""
+
+from __future__ import annotations
+
+try:
+    import click
+
+    from fast_plate_ocr.cli.dataset_stats import dataset_stats
+    from fast_plate_ocr.cli.export import export
+    from fast_plate_ocr.cli.train import train
+    from fast_plate_ocr.cli.valid import valid
+    from fast_plate_ocr.cli.validate_dataset import validate_dataset
+    from fast_plate_ocr.cli.visualize_augmentation import visualize_augmentation
+    from fast_plate_ocr.cli.visualize_predictions import visualize_predictions
+
+except ImportError as e:
+    raise ImportError("Make sure to 'pip install fast-plate-ocr[train]' to run this!") from e
+
+
+@click.group(context_settings={"max_content_width": 120})
+def main_cli():
+    """FastPlateOCR CLI."""
+
+
+main_cli.add_command(dataset_stats)
+main_cli.add_command(export)
+main_cli.add_command(train)
+main_cli.add_command(valid)
+main_cli.add_command(validate_dataset)
+main_cli.add_command(visualize_augmentation)
+main_cli.add_command(visualize_predictions)
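This file follows click's group/subcommand layout: each tool is a standalone @click.command that the group registers via add_command. A minimal sketch of the same wiring, exercised with click's test runner; the subcommand body and option here are placeholders, not the real FastPlateOCR commands:

```python
import click
from click.testing import CliRunner

@click.command(name="dataset-stats")
@click.option("--annotations", "-a", required=True, help="Path to the annotations CSV.")
def dataset_stats(annotations: str) -> None:
    """Placeholder subcommand mirroring the registration pattern."""
    click.echo(f"stats for {annotations}")

@click.group(context_settings={"max_content_width": 120})
def main_cli() -> None:
    """FastPlateOCR-style CLI group."""

main_cli.add_command(dataset_stats)

if __name__ == "__main__":
    runner = CliRunner()
    result = runner.invoke(main_cli, ["dataset-stats", "-a", "annotations.csv"])
    print(result.output)  # -> "stats for annotations.csv"
```

Wrapping the imports in try/except ImportError, as the real module does, lets the base package install without the optional training dependencies while still pointing users at the extra they need.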
matrice_analytics/post_processing/ocr/fast_plate_ocr_py38/cli/dataset_stats.py (new file)

@@ -0,0 +1,139 @@
+"""
+Display statistics for a `fast-plate-ocr` dataset.
+"""
+
+from __future__ import annotations
+
+from collections import Counter
+from collections.abc import Sequence
+from concurrent.futures import ThreadPoolExecutor
+from pathlib import Path
+
+import click
+import pandas as pd
+from PIL import Image, UnidentifiedImageError
+from rich import box
+from rich.console import Console, Group
+from rich.markup import escape
+from rich.panel import Panel
+from rich.table import Table
+from typing import Optional
+
+from fast_plate_ocr.train.model.config import load_plate_config_from_yaml
+
+# pylint: disable=too-many-locals
+
+console = Console()
+
+
+def _header_shape(path: Path) -> tuple[bool, Optional[tuple[int, int]]]:
+    try:
+        with Image.open(path) as im:
+            im.verify()
+            w, h = im.size
+        return True, (h, w)
+    except (UnidentifiedImageError, OSError):
+        return False, None
+
+
+def _compact_table(title: str, values: Sequence[float]) -> Table:
+    s = pd.Series(values, dtype="float64")
+    desc = s.describe(percentiles=[0.05, 0.5, 0.95])
+    metrics = ["count", "mean", "std", "min", "max", "5%", "50%", "95%"]
+    tbl = Table(title=title, box=box.MINIMAL_DOUBLE_HEAD, pad_edge=False, expand=False)
+    for m in metrics:
+        tbl.add_column(m, justify="right", style="bold")
+    tbl.add_row(*[f"{desc[m]:.2f}" if pd.notna(desc[m]) else "-" for m in metrics])
+    return tbl
+
+
+@click.command(context_settings={"max_content_width": 120})
+@click.option(
+    "--annotations",
+    "-a",
+    required=True,
+    type=click.Path(exists=True, dir_okay=False, file_okay=True, path_type=Path),
+    help="CSV with image_path and plate_text columns.",
+)
+@click.option(
+    "--plate-config-file",
+    "-c",
+    required=True,
+    type=click.Path(exists=True, dir_okay=False, file_okay=True, path_type=Path),
+    help="YAML config so we know alphabet/pad char.",
+)
+@click.option(
+    "--top-chars",
+    default=10,
+    show_default=True,
+    type=int,
+    help="Show N most frequent characters.",
+)
+@click.option(
+    "--workers",
+    default=8,
+    show_default=True,
+    type=int,
+    help="Parallel header reads (0 disables threading).",
+)
+def dataset_stats(annotations: Path, plate_config_file: Path, top_chars: int, workers: int) -> None:
+    """
+    Display statistics for a `fast-plate-ocr` dataset.
+    """
+    plate_config = load_plate_config_from_yaml(plate_config_file)
+
+    df_annots = pd.read_csv(annotations)
+    root = annotations.parent
+    df_annots["image_path"] = df_annots["image_path"].apply(lambda p: str((root / p).resolve()))
+
+    # Plate lengths and char frequencies
+    plate_lengths = df_annots["plate_text"].str.len().tolist()
+    char_counter: Counter[str] = Counter("".join(df_annots["plate_text"].tolist()))
+
+    # File extension counts
+    ext_counter = Counter(df_annots["image_path"].apply(lambda p: Path(p).suffix.lower()))
+
+    # Image header dimensions
+    paths = [Path(p) for p in df_annots["image_path"].tolist()]
+    if workers > 1:
+        with ThreadPoolExecutor(max_workers=workers) as ex:
+            dims = list(ex.map(_header_shape, paths))
+    else:
+        dims = [_header_shape(p) for p in paths]
+
+    valid_dims = [dims_pair for ok, dims_pair in dims if ok and dims_pair is not None]
+
+    heights = [h for h, _ in valid_dims]
+    widths = [w for _, w in valid_dims]
+    aspects = [w / h for h, w in valid_dims if h > 0]
+
+    # Build tables
+    tbl_len = _compact_table("Plate Lengths", plate_lengths)
+    tbl_h = _compact_table("Image Height", heights)
+    tbl_w = _compact_table("Image Width", widths)
+    tbl_ar = _compact_table("Aspect Ratio", aspects)
+
+    # Extension table
+    tbl_ext = Table(title="Extensions", box=box.MINIMAL_DOUBLE_HEAD, pad_edge=False)
+    tbl_ext.add_column("Ext", style="bold", justify="left")
+    tbl_ext.add_column("Count", justify="right")
+    for ext, cnt in ext_counter.most_common():
+        tbl_ext.add_row(ext or "<none>", str(cnt))
+
+    # Character freq table
+    tbl_char = Table(title=f"Top {top_chars} Chars", box=box.MINIMAL_DOUBLE_HEAD, pad_edge=False)
+    tbl_char.add_column("Char", style="bold")
+    tbl_char.add_column("Count", justify="right")
+    for ch, cnt in char_counter.most_common(top_chars):
+        if ch == plate_config.pad_char:
+            continue
+        tbl_char.add_row(escape(ch), str(cnt))
+
+    group = Group(tbl_len, tbl_h, tbl_w, tbl_ar, tbl_ext, tbl_char)
+    console.print(
+        Panel.fit(group, title="Dataset Statistics", border_style="green", box=box.SQUARE)
+    )
+
+
+if __name__ == "__main__":
+    dataset_stats()
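dataset_stats only needs width and height per image, so _header_shape opens each file with PIL, calls verify(), and reads .size without decoding pixel data, fanning the reads out over a ThreadPoolExecutor; the per-metric tables then come from pandas' describe() with custom percentiles. A compressed sketch of both ideas on a throwaway list of paths (the file names below are hypothetical, so the stats print as empty):

```python
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Optional, Tuple

import pandas as pd
from PIL import Image, UnidentifiedImageError

def header_shape(path: Path) -> Tuple[bool, Optional[Tuple[int, int]]]:
    """Return (ok, (height, width)) using only the image header."""
    try:
        with Image.open(path) as im:
            im.verify()          # cheap integrity check, no full decode
            w, h = im.size
        return True, (h, w)
    except (UnidentifiedImageError, OSError):
        return False, None

paths = [Path(p) for p in ["plates/0001.jpg", "plates/0002.jpg"]]  # hypothetical files
with ThreadPoolExecutor(max_workers=8) as ex:
    dims = list(ex.map(header_shape, paths))
heights = [hw[0] for ok, hw in dims if ok and hw]

# The same count/mean/std/min/5%/50%/95%/max metrics the compact tables display.
desc = pd.Series(heights, dtype="float64").describe(percentiles=[0.05, 0.5, 0.95])
print(desc)
```

Reading only headers keeps the statistics pass fast even on large datasets, since no image is fully decoded.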