smartx-rfid 1.1.1__py3-none-any.whl → 1.5.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- smartx_rfid/clients/rchlo.py +9 -0
- smartx_rfid/db/__init__.py +1 -0
- smartx_rfid/db/_main.py +432 -0
- smartx_rfid/devices/RFID/X714/on_receive.py +0 -3
- smartx_rfid/parser/__init__.py +1 -0
- smartx_rfid/parser/main.py +27 -0
- smartx_rfid/parser/rfid_tag_parser/__init__.py +15 -0
- smartx_rfid/parser/rfid_tag_parser/exceptions.py +15 -0
- smartx_rfid/parser/rfid_tag_parser/tag_tid_parser.py +674 -0
- smartx_rfid/schemas/events.py +7 -0
- smartx_rfid/schemas/tag.py +4 -4
- smartx_rfid/utils/__init__.py +1 -0
- smartx_rfid/utils/path.py +2 -2
- smartx_rfid/utils/regex.py +9 -0
- smartx_rfid/utils/tag_list.py +53 -18
- smartx_rfid/webhook/__init__.py +1 -0
- smartx_rfid/webhook/_main.py +88 -0
- smartx_rfid-1.5.4.dist-info/METADATA +344 -0
- {smartx_rfid-1.1.1.dist-info → smartx_rfid-1.5.4.dist-info}/RECORD +21 -9
- smartx_rfid-1.1.1.dist-info/METADATA +0 -83
- {smartx_rfid-1.1.1.dist-info → smartx_rfid-1.5.4.dist-info}/WHEEL +0 -0
- {smartx_rfid-1.1.1.dist-info → smartx_rfid-1.5.4.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from ._main import DatabaseManager
|
smartx_rfid/db/_main.py
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import Any, Dict, List, Optional, Type, Union
|
|
3
|
+
from contextlib import contextmanager
|
|
4
|
+
from sqlalchemy import create_engine, text, MetaData, inspect, event
|
|
5
|
+
from sqlalchemy.orm import sessionmaker, DeclarativeBase, scoped_session
|
|
6
|
+
from sqlalchemy.pool import QueuePool
|
|
7
|
+
from sqlalchemy.engine import Engine
|
|
8
|
+
import threading
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class DatabaseError(Exception):
    """Base exception for database failures.

    The underlying exception (when there is one) is kept on
    ``original_error`` so callers can inspect the root cause. Constructing
    the exception also emits an error-level log record as a side effect.
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        super().__init__(message)
        self.original_error = original_error

        # Side effect: every constructed DatabaseError is logged immediately.
        if original_error is None:
            logging.error(f"DatabaseError: {message}")
        else:
            logging.error(f"DatabaseError: {message} | Original: {str(original_error)}")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DatabaseConnectionError(DatabaseError):
    """Raised when a database connection cannot be established.

    On top of the error-level record emitted by ``DatabaseError``, this
    class logs at CRITICAL level, since a failed connection normally means
    the service cannot operate at all.
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        super().__init__(message, original_error)
        # Escalate connection failures beyond the base-class error log.
        logging.critical(f"Database connection failed: {message}")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class DatabaseOperationError(DatabaseError):
    """Raised when a database operation (query, insert, update, ...) fails.

    Adds an operation-specific error log on top of the record already
    emitted by the ``DatabaseError`` base class.
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        super().__init__(message, original_error)
        # Extra log line identifying this as an operation-level failure.
        logging.error(f"Database operation failed: {message}")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class DatabaseManager:
    """
    Professional database manager built on SQLAlchemy with support for
    multiple database backends.

    Features:
    - Multiple database support (PostgreSQL, MySQL, SQLite, SQL Server, Oracle, etc.)
    - Connection pooling
    - Session management
    - Automatic table creation
    - Raw SQL query support
    - Transaction management
    - Error handling
    - Thread safety
    """

    def __init__(
        self,
        database_url: str,
        echo: bool = False,
        pool_size: int = 10,
        max_overflow: int = 20,
        pool_timeout: int = 30,
        pool_recycle: int = 3600,
        **engine_kwargs,
    ):
        """
        Initialize the database manager.

        No connection is opened here; call initialize() before any other
        method.

        Args:
            database_url (str): Database connection URL
            echo (bool): Enable SQL query logging
            pool_size (int): Number of connections to maintain
            max_overflow (int): Maximum overflow connections
            pool_timeout (int): Timeout (seconds) for getting a connection from the pool
            pool_recycle (int): Recycle connections after this many seconds
            **engine_kwargs: Additional configuration forwarded to create_engine()
        """
        self.database_url = database_url
        self.echo = echo
        self._engine: Optional[Engine] = None
        self._session_factory: Optional[sessionmaker] = None
        self._scoped_session: Optional[scoped_session] = None
        self._metadata: Optional[MetaData] = None
        self._models_registry: List[Type[DeclarativeBase]] = []
        # Serializes lazy engine creation when initialize() is called
        # concurrently from several threads.
        self._lock = threading.Lock()

        # Engine configuration; caller-supplied kwargs may override defaults.
        self._engine_config = {
            "echo": echo,
            "poolclass": QueuePool,
            "pool_size": pool_size,
            "max_overflow": max_overflow,
            "pool_timeout": pool_timeout,
            "pool_recycle": pool_recycle,
            "pool_pre_ping": True,  # Validate connections before use
            **engine_kwargs,
        }

        # Setup logging
        self.logger = logging.getLogger(__name__)

    def initialize(self) -> None:
        """Create the engine, session factory and metadata (idempotent).

        Raises:
            DatabaseConnectionError: If engine creation fails.
        """
        try:
            with self._lock:
                if self._engine is None:
                    self._engine = create_engine(self.database_url, **self._engine_config)

                    # Attach pool/connection event listeners to the new engine.
                    self._setup_event_listeners()

                    # expire_on_commit=False keeps loaded attributes usable
                    # after the session commits.
                    self._session_factory = sessionmaker(bind=self._engine, expire_on_commit=False)

                    # Scoped session gives each thread its own session.
                    self._scoped_session = scoped_session(self._session_factory)

                    self._metadata = MetaData()

                    self.logger.info(f"Database initialized successfully: {self._get_db_info()}")

        except Exception as e:
            self.logger.error(f"Failed to initialize database: {str(e)}")
            raise DatabaseConnectionError(f"Database initialization failed: {str(e)}", e)

    def _setup_event_listeners(self) -> None:
        """Register SQLAlchemy engine events for connection management."""

        @event.listens_for(self._engine, "connect")
        def set_sqlite_pragma(dbapi_connection, connection_record):
            """Enable foreign-key enforcement for SQLite (off by default)."""
            if "sqlite" in self.database_url.lower():
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

        @event.listens_for(self._engine, "checkout")
        def receive_checkout(dbapi_connection, connection_record, connection_proxy):
            """Log connection checkout."""
            self.logger.debug("Connection checked out from pool")

        @event.listens_for(self._engine, "checkin")
        def receive_checkin(dbapi_connection, connection_record):
            """Log connection checkin."""
            self.logger.debug("Connection returned to pool")

    def _get_db_info(self) -> str:
        """Return a short "dialect (database)" description for log messages."""
        if self._engine:
            return f"{self._engine.dialect.name} ({self._engine.url.database})"
        return "Unknown"

    def register_models(self, *models: Type[DeclarativeBase]) -> None:
        """
        Register SQLAlchemy models for table creation.

        Duplicate registrations are ignored.

        Args:
            *models: SQLAlchemy model classes
        """
        for model in models:
            if model not in self._models_registry:
                self._models_registry.append(model)
                self.logger.debug(f"Registered model: {model.__name__}")

    def create_tables(self, checkfirst: bool = True) -> None:
        """
        Create all registered model tables.

        Args:
            checkfirst (bool): Check if tables exist before creating

        Raises:
            DatabaseError: If the manager is not initialized.
            DatabaseOperationError: If table creation fails.
        """
        if not self._engine:
            raise DatabaseError("Database not initialized. Call initialize() first.")

        if not self._models_registry:
            self.logger.warning("No models registered for table creation")
            return

        try:
            # All registered models are expected to share one declarative
            # base, so the first model's metadata covers every table.
            base_metadata = self._models_registry[0].metadata

            base_metadata.create_all(bind=self._engine, checkfirst=checkfirst)

            self.logger.info(f"Tables created successfully for {len(self._models_registry)} models")

        except Exception as e:
            self.logger.error(f"Failed to create tables: {str(e)}")
            raise DatabaseOperationError(f"Table creation failed: {str(e)}", e)

    def drop_tables(self, checkfirst: bool = True) -> None:
        """
        Drop all registered model tables.

        Args:
            checkfirst (bool): Check if tables exist before dropping

        Raises:
            DatabaseError: If the manager is not initialized.
            DatabaseOperationError: If table dropping fails.
        """
        if not self._engine:
            raise DatabaseError("Database not initialized. Call initialize() first.")

        if not self._models_registry:
            self.logger.warning("No models registered for table dropping")
            return

        try:
            # Same assumption as create_tables(): one shared metadata.
            base_metadata = self._models_registry[0].metadata

            base_metadata.drop_all(bind=self._engine, checkfirst=checkfirst)

            self.logger.info("Tables dropped successfully")

        except Exception as e:
            self.logger.error(f"Failed to drop tables: {str(e)}")
            raise DatabaseOperationError(f"Table dropping failed: {str(e)}", e)

    def table_exists(self, table_name: str) -> bool:
        """
        Check if a table exists in the database.

        Args:
            table_name (str): Name of the table to check

        Returns:
            bool: True if table exists, False otherwise (including when the
            inspection itself fails — the error is logged, not raised).
        """
        if not self._engine:
            raise DatabaseError("Database not initialized. Call initialize() first.")

        try:
            inspector = inspect(self._engine)
            return table_name in inspector.get_table_names()
        except Exception as e:
            self.logger.error(f"Failed to check table existence: {str(e)}")
            return False

    def get_table_names(self) -> List[str]:
        """
        Get list of all table names in the database.

        Returns:
            List[str]: List of table names

        Raises:
            DatabaseError: If the manager is not initialized.
            DatabaseOperationError: If inspection fails.
        """
        if not self._engine:
            raise DatabaseError("Database not initialized. Call initialize() first.")

        try:
            inspector = inspect(self._engine)
            return inspector.get_table_names()
        except Exception as e:
            self.logger.error(f"Failed to get table names: {str(e)}")
            raise DatabaseOperationError(f"Failed to get table names: {str(e)}", e)

    @contextmanager
    def get_session(self):
        """
        Get a database session with automatic commit/rollback and cleanup.

        Commits on normal exit, rolls back and wraps the error in
        DatabaseOperationError on failure, and always closes the session.

        Yields:
            Session: SQLAlchemy session
        """
        if not self._session_factory:
            raise DatabaseError("Database not initialized. Call initialize() first.")

        session = self._session_factory()
        try:
            yield session
            session.commit()
        except Exception as e:
            session.rollback()
            self.logger.error(f"Session error: {str(e)}")
            raise DatabaseOperationError(f"Database operation failed: {str(e)}", e)
        finally:
            session.close()

    def get_scoped_session(self) -> scoped_session:
        """
        Get a scoped session for thread-safe operations.

        Returns:
            scoped_session: Thread-safe scoped session

        Raises:
            DatabaseError: If the manager is not initialized.
        """
        if not self._scoped_session:
            raise DatabaseError("Database not initialized. Call initialize() first.")
        return self._scoped_session

    def execute_query(self, query: Union[str, text], params: Optional[Dict[str, Any]] = None) -> Any:
        """
        Execute a raw SQL query inside a short-lived session.

        Args:
            query (Union[str, text]): SQL query to execute
            params (Optional[Dict[str, Any]]): Query parameters

        Returns:
            Any: SQLAlchemy result object.

        Warning:
            The session is committed and closed before this method returns,
            so unbuffered result rows may no longer be fetchable afterwards.
            Use execute_query_fetchall()/execute_query_fetchone() to read
            rows safely.
        """
        with self.get_session() as session:
            try:
                if isinstance(query, str):
                    query = text(query)

                result = session.execute(query, params or {})
                return result
            except Exception as e:
                self.logger.error(f"Query execution failed: {str(e)}")
                raise DatabaseOperationError(f"Query execution failed: {str(e)}", e)

    def execute_query_fetchall(self, query: Union[str, text], params: Optional[Dict[str, Any]] = None) -> List[Any]:
        """
        Execute a raw SQL query and fetch all results.

        Rows are fetched while the session is still open. (Previously this
        fetched from a result whose session had already been closed, which
        fails for unbuffered results.)

        Args:
            query (Union[str, text]): SQL query to execute
            params (Optional[Dict[str, Any]]): Query parameters

        Returns:
            List[Any]: Query results
        """
        if isinstance(query, str):
            query = text(query)

        with self.get_session() as session:
            try:
                # Fetch before the session context exits and closes the
                # underlying cursor.
                return session.execute(query, params or {}).fetchall()
            except Exception as e:
                self.logger.error(f"Query execution failed: {str(e)}")
                raise DatabaseOperationError(f"Query execution failed: {str(e)}", e)

    def execute_query_fetchone(self, query: Union[str, text], params: Optional[Dict[str, Any]] = None) -> Any:
        """
        Execute a raw SQL query and fetch one result.

        The row is fetched while the session is still open (see
        execute_query_fetchall()).

        Args:
            query (Union[str, text]): SQL query to execute
            params (Optional[Dict[str, Any]]): Query parameters

        Returns:
            Any: First result row, or None when the query returns no rows.
        """
        if isinstance(query, str):
            query = text(query)

        with self.get_session() as session:
            try:
                # Fetch before the session context exits and closes the
                # underlying cursor.
                return session.execute(query, params or {}).fetchone()
            except Exception as e:
                self.logger.error(f"Query execution failed: {str(e)}")
                raise DatabaseOperationError(f"Query execution failed: {str(e)}", e)

    def bulk_insert(self, model_class: Type[DeclarativeBase], data: List[Dict[str, Any]]) -> None:
        """
        Perform bulk insert operation.

        Args:
            model_class (Type[DeclarativeBase]): Model class
            data (List[Dict[str, Any]]): List of data dictionaries
        """
        with self.get_session() as session:
            try:
                session.bulk_insert_mappings(model_class, data)
                self.logger.info(f"Bulk inserted {len(data)} records into {model_class.__name__}")
            except Exception as e:
                self.logger.error(f"Bulk insert failed: {str(e)}")
                raise DatabaseOperationError(f"Bulk insert failed: {str(e)}", e)

    def bulk_update(self, model_class: Type[DeclarativeBase], data: List[Dict[str, Any]]) -> None:
        """
        Perform bulk update operation.

        Each data dict must carry the primary-key value of the row to update.

        Args:
            model_class (Type[DeclarativeBase]): Model class
            data (List[Dict[str, Any]]): List of data dictionaries
        """
        with self.get_session() as session:
            try:
                session.bulk_update_mappings(model_class, data)
                self.logger.info(f"Bulk updated {len(data)} records in {model_class.__name__}")
            except Exception as e:
                self.logger.error(f"Bulk update failed: {str(e)}")
                raise DatabaseOperationError(f"Bulk update failed: {str(e)}", e)

    def get_connection_info(self) -> Dict[str, Any]:
        """
        Get database connection information.

        Returns:
            Dict[str, Any]: Connection/pool statistics, or a status-only dict
            when the manager is uninitialized or inspection fails.
        """
        if not self._engine:
            return {"status": "not_initialized"}

        try:
            pool = self._engine.pool
            return {
                "status": "connected",
                "database_type": self._engine.dialect.name,
                "database_name": self._engine.url.database,
                "pool_size": pool.size(),
                "checked_in_connections": pool.checkedin(),
                "checked_out_connections": pool.checkedout(),
                "overflow": pool.overflow(),
            }
        except Exception as e:
            self.logger.error(f"Failed to get connection info: {str(e)}")
            return {"status": "error", "error": str(e)}

    def close(self) -> None:
        """Close all database connections and cleanup resources.

        Safe to call multiple times; internal state is always reset so the
        manager can be re-initialized afterwards.
        """
        try:
            if self._scoped_session:
                self._scoped_session.remove()

            if self._engine:
                self._engine.dispose()

            self.logger.info("Database connections closed successfully")

        except Exception as e:
            self.logger.error(f"Error closing database connections: {str(e)}")
        finally:
            self._engine = None
            self._session_factory = None
            self._scoped_session = None
            self._metadata = None

    def __enter__(self):
        """Context manager entry: initialize and return self."""
        self.initialize()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: release all connections."""
        self.close()

    def __repr__(self) -> str:
        """String representation of the database manager."""
        status = "initialized" if self._engine else "not_initialized"
        return f"DatabaseManager(status={status}, url={self.database_url})"
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .main import get_serial_from_tid
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from .rfid_tag_parser.tag_tid_parser import parse_tid
|
|
3
|
+
from smartx_rfid.utils import regex_hex
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def get_serial_from_tid(tid: str) -> "str | None":
    """
    Extract the decimal serial number from a tag TID.

    (Fixed: the return annotation previously said ``bool``, but the function
    returns the serial string or None.)

    Args:
        tid (str): Tag identifier as a 24-character hexadecimal string.

    Returns:
        str | None: Serial number in decimal form, or None when the TID is
        malformed, cannot be parsed, or carries no serial field.
    """
    # validate TID format: must be exactly 24 hex characters
    if not regex_hex(tid, 24):
        return None

    try:
        # SUFIX
        parse = parse_tid(tid)
        if not parse:
            return None

        serial = parse.get("serial_decimal")
        # parse_tid may omit the serial for unsupported chip models.
        if serial is None:
            return None

        return str(serial)
    except Exception as e:
        # Vendor TID data is unpredictable; never let a parse crash callers.
        logging.error(f"Parse ERROR: {e}")
        return None
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
RFID Tag TID Parser

A Python parser for analyzing the TID (Tag Identifier) of RFID tags.
Extracts information such as manufacturer, model, serial number and other
characteristics.
"""

from .tag_tid_parser import TagTidParser, parse_tid, get_serial_from_tid
from .exceptions import TagTidParserError, InvalidTidError

__version__ = "1.0.0"
# NOTE(review): placeholder author metadata ("Your Name" / example e-mail)
# left as-is from the upstream template — fill in before release.
__author__ = "Seu Nome"
__email__ = "seu.email@exemplo.com"

# Public API of the package.
__all__ = ["TagTidParser", "parse_tid", "get_serial_from_tid", "TagTidParserError", "InvalidTidError"]
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Exceções customizadas para o RFID Tag TID Parser.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class TagTidParserError(Exception):
    """Base exception for TagTidParser errors."""
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class InvalidTidError(TagTidParserError):
    """Raised when the provided TID is invalid."""
|