fastmssql 0.4.2__cp310-cp310-manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fastmssql/__init__.py ADDED
@@ -0,0 +1,957 @@
1
+ """
2
+ FastMSSQL - High-Performance Microsoft SQL Server Driver for Python
3
+ ===================================================================
4
+
5
+ This library provides direct access to high-performance Rust implementations
6
+ with minimal Python overhead for maximum performance. Built on top of the
7
+ tiberius crate, it offers both synchronous and asynchronous database operations
8
+ with advanced features like connection pooling, SSL/TLS configuration, and
9
+ efficient parameter handling.
10
+
11
+ Key Features:
12
+ - High-performance Rust backend with Python bindings
13
+ - Async/await support for non-blocking operations
14
+ - Connection pooling with configurable parameters
15
+ - SSL/TLS encryption with certificate validation
16
+ - Parameterized queries with automatic type conversion
17
+ - Memory-efficient result iteration
18
+ - Comprehensive error handling and logging
19
+
20
+ Basic Usage (Async):
21
+ >>> import asyncio
22
+ >>> from fastmssql import Connection
23
+ >>>
24
+ >>> async def main():
25
+ ... async with Connection("Server=localhost;Database=test;Trusted_Connection=yes") as conn:
26
+ ... # SELECT queries - use query() method
27
+ ... result = await conn.query("SELECT * FROM users")
28
+ ... async for row in result:
29
+ ... print(f"User: {row['name']}, Age: {row['age']}")
30
+ ...
31
+ ... # Parameterized SELECT query
32
+ ... result = await conn.query(
33
+ ... "SELECT * FROM users WHERE age > @P1 AND city = @P2",
34
+ ... [18, "New York"]
35
+ ... )
36
+ ... rows = await result.fetchall()
37
+ ... print(f"Found {len(rows)} users")
38
+ ...
39
+ ... # INSERT/UPDATE/DELETE - use execute() method
40
+ ... affected = await conn.execute(
41
+ ... "INSERT INTO users (name, email, age) VALUES (@P1, @P2, @P3)",
42
+ ... ["John Doe", "john@example.com", 30]
43
+ ... )
44
+ ... print(f"Inserted {affected} rows")
45
+ >>>
46
+ >>> asyncio.run(main())
47
+
48
+ Basic Usage (Sync):
49
+ >>> from fastmssql import Connection
50
+ >>>
51
+ >>> with Connection("Server=localhost;Database=test;Trusted_Connection=yes") as conn:
52
+ ... # For SELECT queries, you would typically use the async API
53
+ ... # Sync usage is primarily for simple operations
54
+ ... pass # Connection established and will be closed on exit
55
+
56
+ Advanced Configuration:
57
+ >>> from fastmssql import Connection, PoolConfig, SslConfig, EncryptionLevel
58
+ >>>
59
+ >>> # Configure connection pool
60
+ >>> pool_config = PoolConfig(
61
+ ... max_connections=20,
62
+ ... min_connections=2,
63
+ ... acquire_timeout_seconds=30,
64
+ ... idle_timeout_seconds=600
65
+ ... )
66
+ >>>
67
+ >>> # Configure SSL/TLS
68
+ >>> ssl_config = SslConfig(
69
+ ... encryption_level=EncryptionLevel.Required,
70
+ ... trust_server_certificate=False,
71
+ ... certificate_path="/path/to/cert.pem"
72
+ ... )
73
+ >>>
74
+ >>> conn = Connection(
75
+ ... server="myserver.database.windows.net",
76
+ ... database="mydatabase",
77
+ ... username="myuser",
78
+ ... password="mypassword",
79
+ ... pool_config=pool_config,
80
+ ... ssl_config=ssl_config
81
+ ... )
82
+
83
+ Performance Considerations:
84
+ - Use parameterized queries to prevent SQL injection and improve performance
85
+ - Leverage connection pooling for applications with multiple concurrent operations
86
+ - Use async methods for I/O-bound applications to improve throughput
87
+ - Consider batch operations for bulk data manipulation
88
+ - Monitor connection pool statistics to optimize pool configuration
89
+
90
+ Thread Safety:
91
+ This library is thread-safe and can be used in multi-threaded applications.
92
+ Each Connection instance maintains its own connection pool and can be safely
93
+ shared across threads when using async methods.
94
+ """
95
+
96
+ # Import from the maturin-generated module
97
+ from .fastmssql import Connection as _RustConnection
98
+ from .fastmssql import PoolConfig
99
+ from .fastmssql import SslConfig
100
+ from .fastmssql import FastExecutionResult
101
+ from .fastmssql import version, EncryptionLevel, Parameter, Parameters
102
+
103
+ # Wrapper class to handle async execution result conversion
104
+ class Connection:
105
+ """
106
+ High-performance connection to Microsoft SQL Server.
107
+
108
+ This class provides a Python wrapper around the Rust-based connection implementation,
109
+ offering both synchronous and asynchronous database operations with advanced features
110
+ like connection pooling, SSL/TLS configuration, and efficient parameter handling.
111
+
112
+ The Connection class supports multiple initialization patterns:
113
+ 1. Connection string-based initialization
114
+ 2. Individual parameter initialization
115
+ 3. Advanced configuration with pool and SSL settings
116
+
117
+ Connection Patterns:
118
+ # Using connection string
119
+ conn = Connection("Server=localhost;Database=test;Trusted_Connection=yes")
120
+
121
+ # Using individual parameters
122
+ conn = Connection(
123
+ server="localhost",
124
+ database="test",
125
+ trusted_connection=True
126
+ )
127
+
128
+ # Using username/password authentication
129
+ conn = Connection(
130
+ server="myserver.database.windows.net",
131
+ database="mydatabase",
132
+ username="myuser",
133
+ password="mypassword"
134
+ )
135
+
136
+ Thread Safety:
137
+ This class is thread-safe and maintains an internal connection pool that can
138
+ be safely accessed from multiple threads when using async methods.
139
+
140
+ Performance Notes:
141
+ - Async methods are recommended for I/O-bound applications
142
+ - Connection pooling is automatically managed for optimal resource usage
143
+ - Parameterized queries provide better performance and security
144
+ - Results are streamed efficiently to minimize memory usage
145
+
146
+ Attributes:
147
+ _conn: The underlying Rust connection implementation
148
+ """
149
+
150
+ def __init__(
151
+ self,
152
+ connection_string=None,
153
+ pool_config=None,
154
+ ssl_config=None,
155
+ server=None,
156
+ database=None,
157
+ username=None,
158
+ password=None,
159
+ trusted_connection=None
160
+ ):
161
+ """
162
+ Initialize a new SQL Server connection.
163
+
164
+ Args:
165
+ connection_string (str, optional): Complete ADO.NET-style connection string.
166
+ Takes precedence over individual parameters if provided.
167
+ Example: "Server=localhost;Database=test;Trusted_Connection=yes"
168
+
169
+ pool_config (PoolConfig, optional): Configuration for the connection pool.
170
+ Allows customization of pool size, timeouts, and behavior.
171
+
172
+ ssl_config (SslConfig, optional): SSL/TLS configuration for secure connections.
173
+ Required for encrypted connections to Azure SQL Database and other
174
+ secure SQL Server instances.
175
+
176
+ server (str, optional): SQL Server hostname or IP address.
177
+ Can include instance name (e.g., "localhost\\SQLEXPRESS") or port
178
+ (e.g., "localhost:1433").
179
+
180
+ database (str, optional): Name of the database to connect to.
181
+ If not specified, connects to the default database for the user.
182
+
183
+ username (str, optional): Username for SQL Server authentication.
184
+ Required when not using Windows Authentication.
185
+
186
+ password (str, optional): Password for SQL Server authentication.
187
+ Required when username is provided.
188
+
189
+ trusted_connection (bool, optional): Whether to use Windows Authentication.
190
+ When True, uses the current Windows user's credentials.
191
+ Mutually exclusive with username/password.
192
+
193
+ Raises:
194
+ ValueError: If connection parameters are invalid or conflicting.
195
+ ConnectionError: If unable to establish initial connection pool.
196
+
197
+ Examples:
198
+ # Connection string approach
199
+ >>> conn = Connection("Server=localhost;Database=AdventureWorks;Trusted_Connection=yes")
200
+
201
+ # Individual parameters
202
+ >>> conn = Connection(
203
+ ... server="localhost",
204
+ ... database="AdventureWorks",
205
+ ... trusted_connection=True
206
+ ... )
207
+
208
+ # SQL Server authentication
209
+ >>> conn = Connection(
210
+ ... server="myserver.database.windows.net",
211
+ ... database="mydatabase",
212
+ ... username="myuser@mydomain.com",
213
+ ... password="SecurePassword123!"
214
+ ... )
215
+
216
+ # With advanced configuration
217
+ >>> from fastmssql import PoolConfig, SslConfig, EncryptionLevel
218
+ >>> pool_config = PoolConfig(max_connections=10, min_connections=2)
219
+ >>> ssl_config = SslConfig(encryption_level=EncryptionLevel.Required)
220
+ >>> conn = Connection(
221
+ ... server="secure-server.example.com",
222
+ ... database="production_db",
223
+ ... username="app_user",
224
+ ... password="app_password",
225
+ ... pool_config=pool_config,
226
+ ... ssl_config=ssl_config
227
+ ... )
228
+ """
229
+ self._conn = _RustConnection(
230
+ connection_string=connection_string,
231
+ pool_config=pool_config,
232
+ ssl_config=ssl_config,
233
+ server=server,
234
+ database=database,
235
+ username=username,
236
+ password=password,
237
+ trusted_connection=trusted_connection
238
+ )
239
+
240
+ async def query(self, query, parameters=None):
241
+ """
242
+ Execute a SQL query that returns rows (SELECT statements) asynchronously.
243
+
244
+ This method is specifically designed for SELECT queries and other statements
245
+ that return result sets. It uses the optimized query() method internally
246
+ for maximum performance when fetching data.
247
+
248
+ Use this method for:
249
+ - SELECT statements
250
+ - Stored procedures that return result sets
251
+ - SHOW commands
252
+ - Any query that returns tabular data
253
+
254
+ Parameter Binding:
255
+ Parameters are bound using positional placeholders (@P1, @P2, etc.) in the
256
+ query string. The parameter values are provided as a list in the same order.
257
+
258
+ Supported Parameter Types:
259
+ - None (NULL)
260
+ - bool
261
+ - int (32-bit and 64-bit)
262
+ - float (32-bit and 64-bit)
263
+ - str (varchar, nvarchar, text)
264
+ - bytes (varbinary, image)
265
+ - datetime.datetime (datetime, datetime2)
266
+ - datetime.date (date)
267
+ - datetime.time (time)
268
+ - decimal.Decimal (decimal, money)
269
+ - uuid.UUID (uniqueidentifier)
270
+
271
+ Args:
272
+ query (str): SQL SELECT query to execute. Use @P1, @P2, etc. for parameters.
273
+ Example: "SELECT * FROM users WHERE age > @P1 AND city = @P2"
274
+
275
+ parameters (list, optional): List of parameter values in order.
276
+ Values are automatically converted to appropriate SQL types.
277
+ Example: [18, "New York"]
278
+
279
+ Returns:
280
+ FastExecutionResult: An async iterable result object that provides:
281
+ - Async iteration over result rows
282
+ - fetchone(), fetchmany(), fetchall() methods
283
+ - Row count and column metadata
284
+ - Efficient memory usage for large result sets
285
+
286
+ Raises:
287
+ SqlError: If the SQL query contains syntax errors or constraint violations.
288
+ ConnectionError: If the database connection is lost during execution.
289
+ TimeoutError: If the query execution exceeds configured timeouts.
290
+ ParameterError: If parameter types cannot be converted or are invalid.
291
+
292
+ Examples:
293
+ # Simple SELECT query
294
+ >>> result = await conn.query("SELECT * FROM users")
295
+ >>> async for row in result:
296
+ ... print(f"User ID: {row['id']}, Name: {row['name']}")
297
+
298
+ # Parameterized query
299
+ >>> result = await conn.query(
300
+ ... "SELECT * FROM orders WHERE created_date > @P1 AND amount > @P2",
301
+ ... [datetime(2023, 1, 1), 100.0]
302
+ ... )
303
+ >>> rows = await result.fetchall()
304
+ >>> print(f"Found {len(rows)} orders")
305
+
306
+ # Complex SELECT with joins
307
+ >>> result = await conn.query(
308
+ ... \"\"\"SELECT u.name, u.email, COUNT(o.id) as order_count
309
+ ... FROM users u
310
+ ... LEFT JOIN orders o ON u.id = o.user_id
311
+ ... WHERE u.created_date > @P1
312
+ ... GROUP BY u.id, u.name, u.email
313
+ ... ORDER BY order_count DESC\"\"\",
314
+ ... [datetime(2023, 1, 1)]
315
+ ... )
316
+ >>> async for row in result:
317
+ ... print(f"{row['name']}: {row['order_count']} orders")
318
+
319
+ # Stored procedure that returns data
320
+ >>> result = await conn.query(
321
+ ... "EXEC GetUsersByDepartment @P1, @P2",
322
+ ... ["Engineering", True] # department, active_only
323
+ ... )
324
+ >>> users = await result.fetchall()
325
+
326
+ Performance Tips:
327
+ - Use this method instead of execute() for SELECT queries for better performance
328
+ - For large result sets, iterate asynchronously rather than calling fetchall()
329
+ - Reuse Connection instances to benefit from connection pooling
330
+ - Use appropriate indexes on filtered columns
331
+ """
332
+ return await self._conn.query(query, parameters)
333
+
334
+ async def execute_batch(self, commands):
335
+ """
336
+ Execute multiple SQL commands in a single batch operation for optimal performance.
337
+
338
+ This method executes multiple INSERT, UPDATE, DELETE, or DDL commands in sequence
339
+ on a single connection, minimizing network round-trips and connection overhead.
340
+
341
+ Use this method for:
342
+ - Multiple INSERT/UPDATE/DELETE operations
343
+ - Batch DDL operations (CREATE TABLE, CREATE INDEX, etc.)
344
+ - Mixed command operations that don't need to return result sets
345
+ - Any sequence of commands that modify data
346
+
347
+ Performance Benefits:
348
+ - Single connection usage reduces pool contention
349
+ - Reduced network round-trips compared to individual execute() calls
350
+ - Parameter pre-processing optimization
351
+ - Efficient memory usage for large batch operations
352
+
353
+ Args:
354
+ commands (list): List of tuples, each containing (command, parameters).
355
+ Each tuple should be in the format: (sql_command, parameter_list)
356
+ Example: [
357
+ ("INSERT INTO users (name, age) VALUES (@P1, @P2)", ["Alice", 25]),
358
+ ("UPDATE products SET price = @P1 WHERE id = @P2", [99.99, 123]),
359
+ ("DELETE FROM logs WHERE created_date < @P1", [datetime(2023, 1, 1)]),
360
+ ]
361
+
362
+ Returns:
363
+ list: List of affected row counts for each command, in the same order as input.
364
+ Each element is an integer representing the number of rows affected by
365
+ the corresponding command.
366
+
367
+ Raises:
368
+ SqlError: If any SQL command contains syntax errors or constraint violations.
369
+ ConnectionError: If the database connection is lost during execution.
370
+ TimeoutError: If the batch execution exceeds configured timeouts.
371
+ ParameterError: If parameter types cannot be converted or are invalid.
372
+ ValueError: If the commands list format is incorrect.
373
+
374
+ Examples:
375
+ # Basic batch execution
376
+ >>> commands = [
377
+ ... ("INSERT INTO users (name, email) VALUES (@P1, @P2)", ["John", "john@example.com"]),
378
+ ... ("INSERT INTO users (name, email) VALUES (@P1, @P2)", ["Jane", "jane@example.com"]),
379
+ ... ("UPDATE settings SET value = @P1 WHERE key = @P2", ["enabled", "notifications"])
380
+ ... ]
381
+ >>> results = await conn.execute_batch(commands)
382
+ >>> print(f"Affected rows: {results}") # [1, 1, 1]
383
+
384
+ # Mixed operations batch
385
+ >>> operations = [
386
+ ... ("CREATE TABLE temp_data (id INT, value NVARCHAR(50))", None),
387
+ ... ("INSERT INTO temp_data VALUES (@P1, @P2)", [1, "test"]),
388
+ ... ("UPDATE temp_data SET value = @P1 WHERE id = @P2", ["updated", 1]),
389
+ ... ("DROP TABLE temp_data", None)
390
+ ... ]
391
+ >>> results = await conn.execute_batch(operations)
392
+
393
+ # Bulk data modification
394
+ >>> user_updates = [
395
+ ... ("UPDATE users SET last_login = @P1 WHERE id = @P2", [datetime.now(), user_id])
396
+ ... for user_id in [1, 2, 3, 4, 5]
397
+ ... ]
398
+ >>> results = await conn.execute_batch(user_updates)
399
+ >>> total_updated = sum(results)
400
+ """
401
+ return await self._conn.execute_batch(commands)
402
+
403
+ async def query_batch(self, queries):
404
+ """
405
+ Execute multiple SQL queries in a single batch operation for optimal performance.
406
+
407
+ This method executes multiple SELECT queries in sequence on a single connection,
408
+ minimizing network round-trips and connection overhead while returning all result sets.
409
+
410
+ Use this method for:
411
+ - Multiple related SELECT queries
412
+ - Data analysis operations requiring multiple result sets
413
+ - Report generation with multiple data sources
414
+ - Any sequence of queries that return tabular data
415
+
416
+ Performance Benefits:
417
+ - Single connection usage reduces pool contention
418
+ - Reduced network round-trips compared to individual query() calls
419
+ - Parameter pre-processing optimization
420
+ - Efficient memory usage for multiple result sets
421
+
422
+ Args:
423
+ queries (list): List of tuples, each containing (query, parameters).
424
+ Each tuple should be in the format: (sql_query, parameter_list)
425
+ Example: [
426
+ ("SELECT * FROM users WHERE age > @P1", [18]),
427
+ ("SELECT COUNT(*) as total FROM products", None),
428
+ ("SELECT * FROM orders WHERE created_date > @P1", [datetime(2023, 1, 1)]),
429
+ ]
430
+
431
+ Returns:
432
+ list: List of FastExecutionResult objects for each query, in the same order as input.
433
+ Each FastExecutionResult provides the same interface as individual query() results:
434
+ - Async iteration over rows
435
+ - fetchone(), fetchmany(), fetchall() methods
436
+ - Row count and column metadata
437
+
438
+ Raises:
439
+ SqlError: If any SQL query contains syntax errors or constraint violations.
440
+ ConnectionError: If the database connection is lost during execution.
441
+ TimeoutError: If the query execution exceeds configured timeouts.
442
+ ParameterError: If parameter types cannot be converted or are invalid.
443
+ ValueError: If the queries list format is incorrect.
444
+
445
+ Examples:
446
+ # Basic batch queries
447
+ >>> queries = [
448
+ ... ("SELECT COUNT(*) as user_count FROM users", None),
449
+ ... ("SELECT COUNT(*) as product_count FROM products", None),
450
+ ... ("SELECT * FROM users WHERE created_date > @P1", [datetime(2023, 1, 1)])
451
+ ... ]
452
+ >>> results = await conn.query_batch(queries)
453
+ >>>
454
+ >>> # Process each result
455
+ >>> user_count = (await results[0].fetchone())['user_count']
456
+ >>> product_count = (await results[1].fetchone())['product_count']
457
+ >>> recent_users = await results[2].fetchall()
458
+
459
+ # Analytics batch
460
+ >>> analytics_queries = [
461
+ ... ("SELECT DATE(created_date) as date, COUNT(*) as registrations FROM users GROUP BY DATE(created_date)", None),
462
+ ... ("SELECT category, AVG(price) as avg_price FROM products GROUP BY category", None),
463
+ ... ("SELECT status, COUNT(*) as order_count FROM orders GROUP BY status", None)
464
+ ... ]
465
+ >>> results = await conn.query_batch(analytics_queries)
466
+ >>>
467
+ >>> # Process analytics data
468
+ >>> for i, result in enumerate(results):
469
+ ... print(f"Query {i+1} results:")
470
+ ... async for row in result:
471
+ ... print(f" {dict(row)}")
472
+
473
+ # Related data batch
474
+ >>> user_id = 123
475
+ >>> related_queries = [
476
+ ... ("SELECT * FROM users WHERE id = @P1", [user_id]),
477
+ ... ("SELECT * FROM orders WHERE user_id = @P1 ORDER BY created_date DESC", [user_id]),
478
+ ... ("SELECT * FROM user_preferences WHERE user_id = @P1", [user_id])
479
+ ... ]
480
+ >>> results = await conn.query_batch(related_queries)
481
+ >>> user_data = await results[0].fetchone()
482
+ >>> user_orders = await results[1].fetchall()
483
+ >>> user_prefs = await results[2].fetchall()
484
+ """
485
+ return await self._conn.query_batch(queries)
486
+
487
+ async def bulk_insert(self, table_name, columns, data_rows):
488
+ """
489
+ Perform high-performance bulk insert operation for large datasets.
490
+
491
+ This method is optimized for inserting many rows into a single table with
492
+ maximum performance. It processes data in batches to optimize memory usage
493
+ and network efficiency while maintaining consistency.
494
+
495
+ Use this method for:
496
+ - Large data imports (CSV, JSON, API data)
497
+ - ETL operations and data migration
498
+ - Batch data processing pipelines
499
+ - Any scenario requiring insertion of many rows
500
+
501
+ Performance Benefits:
502
+ - Optimized batch processing with configurable batch sizes
503
+ - Minimal memory overhead through streaming processing
504
+ - Single connection usage reduces pool contention
505
+ - Pre-compiled parameter handling for maximum speed
506
+ - Automatic transaction batching for consistency
507
+
508
+ Args:
509
+ table_name (str): Name of the target table for insertion.
510
+ Can be schema-qualified (e.g., "dbo.my_table" or "my_schema.my_table").
511
+
512
+ columns (list): List of column names in the order they appear in data_rows.
513
+ Example: ["name", "email", "age", "created_date"]
514
+
515
+ data_rows (list): List of data rows, where each row is a list of values
516
+ corresponding to the columns. All rows must have the same number of
517
+ values as there are columns.
518
+ Example: [
519
+ ["Alice", "alice@example.com", 25, datetime(2023, 1, 1)],
520
+ ["Bob", "bob@example.com", 30, datetime(2023, 1, 2)],
521
+ ["Charlie", "charlie@example.com", 35, datetime(2023, 1, 3)]
522
+ ]
523
+
524
+ Returns:
525
+ int: Total number of rows successfully inserted.
526
+
527
+ Raises:
528
+ SqlError: If table doesn't exist, column names are invalid, or constraint violations occur.
529
+ ConnectionError: If the database connection is lost during execution.
530
+ TimeoutError: If the bulk insert exceeds configured timeouts.
531
+ ParameterError: If data types cannot be converted to appropriate SQL types.
532
+ ValueError: If columns and data_rows have mismatched sizes or invalid format.
533
+
534
+ Examples:
535
+ # Basic bulk insert
536
+ >>> columns = ["name", "email", "age"]
537
+ >>> data = [
538
+ ... ["Alice", "alice@example.com", 25],
539
+ ... ["Bob", "bob@example.com", 30],
540
+ ... ["Charlie", "charlie@example.com", 35]
541
+ ... ]
542
+ >>> rows_inserted = await conn.bulk_insert("users", columns, data)
543
+ >>> print(f"Inserted {rows_inserted} rows")
544
+
545
+ # Large dataset import
546
+ >>> import csv
547
+ >>> columns = ["product_name", "category", "price", "in_stock"]
548
+ >>> data_rows = []
549
+ >>>
550
+ >>> with open('products.csv', 'r') as file:
551
+ ... reader = csv.reader(file)
552
+ ... next(reader) # Skip header
553
+ ... for row in reader:
554
+ ... data_rows.append([row[0], row[1], float(row[2]), bool(int(row[3]))])
555
+ >>>
556
+ >>> total_inserted = await conn.bulk_insert("products", columns, data_rows)
557
+ >>> print(f"Imported {total_inserted} products from CSV")
558
+
559
+ # Generated data bulk insert
560
+ >>> from datetime import datetime, timedelta
561
+ >>> import random
562
+ >>>
563
+ >>> columns = ["user_id", "activity", "timestamp", "value"]
564
+ >>> activities = ["login", "logout", "view_page", "click_button", "purchase"]
565
+ >>>
566
+ >>> # Generate 10,000 activity records
567
+ >>> data_rows = []
568
+ >>> for i in range(10000):
569
+ ... user_id = random.randint(1, 1000)
570
+ ... activity = random.choice(activities)
571
+ ... timestamp = datetime.now() - timedelta(days=random.randint(0, 30))
572
+ ... value = random.randint(1, 100)
573
+ ... data_rows.append([user_id, activity, timestamp, value])
574
+ >>>
575
+ >>> rows_inserted = await conn.bulk_insert("user_activities", columns, data_rows)
576
+ >>> print(f"Inserted {rows_inserted} activity records")
577
+
578
+ # Data transformation during bulk insert
579
+ >>> raw_data = fetch_api_data() # Some external data source
580
+ >>> columns = ["name", "email", "normalized_phone", "registration_date"]
581
+ >>>
582
+ >>> processed_data = []
583
+ >>> for record in raw_data:
584
+ ... processed_data.append([
585
+ ... record['full_name'].strip().title(),
586
+ ... record['email'].lower(),
587
+ ... normalize_phone(record['phone']),
588
+ ... datetime.fromisoformat(record['reg_date'])
589
+ ... ])
590
+ >>>
591
+ >>> result = await conn.bulk_insert("customers", columns, processed_data)
592
+ """
593
+ return await self._conn.bulk_insert(table_name, columns, data_rows)
594
+
595
+ async def execute(self, query, parameters=None):
596
+ """
597
+ Execute a SQL command that doesn't return rows (INSERT/UPDATE/DELETE/DDL) asynchronously.
598
+
599
+ This method is specifically designed for SQL commands that modify data or database
600
+ structure but don't return result sets. It uses the optimized execute() method
601
+ internally for maximum performance when performing data modifications.
602
+
603
+ Use this method for:
604
+ - INSERT statements
605
+ - UPDATE statements
606
+ - DELETE statements
607
+ - DDL commands (CREATE, ALTER, DROP)
608
+ - Stored procedures that don't return result sets
609
+ - MERGE statements
610
+
611
+ Parameter Binding:
612
+ Parameters are bound using positional placeholders (@P1, @P2, etc.) in the
613
+ query string. The parameter values are provided as a list in the same order.
614
+
615
+ Supported Parameter Types:
616
+ - None (NULL)
617
+ - bool
618
+ - int (32-bit and 64-bit)
619
+ - float (32-bit and 64-bit)
620
+ - str (varchar, nvarchar, text)
621
+ - bytes (varbinary, image)
622
+ - datetime.datetime (datetime, datetime2)
623
+ - datetime.date (date)
624
+ - datetime.time (time)
625
+ - decimal.Decimal (decimal, money)
626
+ - uuid.UUID (uniqueidentifier)
627
+
628
+ Args:
629
+ query (str): SQL command to execute. Use @P1, @P2, etc. for parameters.
630
+ Example: "INSERT INTO users (name, email, age) VALUES (@P1, @P2, @P3)"
631
+
632
+ parameters (list, optional): List of parameter values in order.
633
+ Values are automatically converted to appropriate SQL types.
634
+ Example: ["John Doe", "john@example.com", 30]
635
+
636
+ Returns:
637
+ int: Number of rows affected by the command.
638
+ - For INSERT: Number of rows inserted
639
+ - For UPDATE: Number of rows updated
640
+ - For DELETE: Number of rows deleted
641
+ - For DDL: Usually 0 (structure changes don't affect rows)
642
+
643
+ Raises:
644
+ SqlError: If the SQL command contains syntax errors or constraint violations.
645
+ ConnectionError: If the database connection is lost during execution.
646
+ TimeoutError: If the command execution exceeds configured timeouts.
647
+ ParameterError: If parameter types cannot be converted or are invalid.
648
+
649
+ Examples:
650
+ # INSERT with parameters
651
+ >>> affected = await conn.execute(
652
+ ... "INSERT INTO users (name, email, age) VALUES (@P1, @P2, @P3)",
653
+ ... ["John Doe", "john@example.com", 30]
654
+ ... )
655
+ >>> print(f"Inserted {affected} row(s)")
656
+
657
+ # UPDATE with conditions
658
+ >>> affected = await conn.execute(
659
+ ... "UPDATE users SET age = @P1, updated_date = @P2 WHERE id = @P3",
660
+ ... [31, datetime.now(), 123]
661
+ ... )
662
+ >>> print(f"Updated {affected} user(s)")
663
+
664
+ # DELETE with parameters
665
+ >>> affected = await conn.execute(
666
+ ... "DELETE FROM users WHERE age < @P1 AND last_login < @P2",
667
+ ... [18, datetime(2020, 1, 1)]
668
+ ... )
669
+ >>> print(f"Deleted {affected} inactive users")
670
+
671
+ # DDL commands
672
+ >>> affected = await conn.execute(
673
+ ... \"\"\"CREATE TABLE temp_data (
674
+ ... id INT IDENTITY(1,1) PRIMARY KEY,
675
+ ... name NVARCHAR(100) NOT NULL,
676
+ ... created_date DATETIME2 DEFAULT GETDATE()
677
+ ... )\"\"\"
678
+ ... )
679
+ >>> print(f"Table created (affected rows: {affected})")
680
+
681
+ # Stored procedure that modifies data
682
+ >>> affected = await conn.execute(
683
+ ... "EXEC UpdateUserPreferences @P1, @P2",
684
+ ... [user_id, json.dumps(preferences)]
685
+ ... )
686
+ >>> print(f"Updated preferences for {affected} user(s)")
687
+
688
+ # Batch operations
689
+ >>> users_to_insert = [
690
+ ... ["Alice Johnson", "alice@example.com", 28],
691
+ ... ["Bob Smith", "bob@example.com", 32],
692
+ ... ["Carol Davis", "carol@example.com", 25]
693
+ ... ]
694
+ >>> total_affected = 0
695
+ >>> for user_data in users_to_insert:
696
+ ... affected = await conn.execute(
697
+ ... "INSERT INTO users (name, email, age) VALUES (@P1, @P2, @P3)",
698
+ ... user_data
699
+ ... )
700
+ ... total_affected += affected
701
+ >>> print(f"Inserted {total_affected} users total")
702
+
703
+ Performance Tips:
704
+ - Use this method instead of query() for data modification commands
705
+ - For bulk operations, consider using batch processing or table-valued parameters
706
+ - Use transactions for multiple related operations
707
+ - Monitor the returned affected row count for validation
708
+
709
+ Security Notes:
710
+ - Always use parameterized queries to prevent SQL injection attacks
711
+ - Validate affected row counts match expectations
712
+ - Consider using transactions for data consistency
713
+ """
714
+ return await self._conn.execute(query, parameters)
715
+
716
+
717
+ async def is_connected(self):
718
+ """
719
+ Check if the connection is active and available for queries.
720
+
721
+ This method performs a lightweight check to determine if the underlying
722
+ connection pool has active connections and can accept new queries.
723
+ It's useful for health checks and connection validation in long-running
724
+ applications.
725
+
726
+ The check verifies:
727
+ - Connection pool is initialized and operational
728
+ - At least one connection in the pool is active
729
+ - Network connectivity to the SQL Server instance
730
+ - Authentication credentials are still valid
731
+
732
+ Returns:
733
+ bool: True if the connection is active and ready for queries,
734
+ False if the connection is closed, failed, or unavailable.
735
+
736
+ Raises:
737
+ ConnectionError: If there's an unexpected error checking connection status.
738
+
739
+ Examples:
740
+ # Basic connection check
741
+ >>> if await conn.is_connected():
742
+ ... result = await conn.execute("SELECT COUNT(*) FROM users")
743
+ ... else:
744
+ ... await conn.connect() # Reconnect if needed
745
+
746
+ # Health check in a web application
747
+ >>> async def health_check():
748
+ ... try:
749
+ ... if await conn.is_connected():
750
+ ... return {"database": "healthy", "status": "connected"}
751
+ ... else:
752
+ ... return {"database": "unhealthy", "status": "disconnected"}
753
+ ... except Exception as e:
754
+ ... return {"database": "error", "status": str(e)}
755
+
756
+ # Periodic connection monitoring
757
+ >>> import asyncio
758
+ >>>
759
+ >>> async def monitor_connection():
760
+ ... while True:
761
+ ... if await conn.is_connected():
762
+ ... print(f"{datetime.now()}: Database connection is healthy")
763
+ ... else:
764
+ ... print(f"{datetime.now()}: Database connection is down!")
765
+ ... # Attempt to reconnect
766
+ ... try:
767
+ ... await conn.connect()
768
+ ... print("Reconnection successful")
769
+ ... except Exception as e:
770
+ ... print(f"Reconnection failed: {e}")
771
+ ...
772
+ ... await asyncio.sleep(60) # Check every minute
773
+
774
+ Performance Notes:
775
+ - This is a lightweight operation that doesn't execute actual SQL
776
+ - The check uses connection pool metadata and cached connection state
777
+ - Suitable for frequent health checks without performance impact
778
+ - Does not count against connection pool limits
779
+
780
+ Use Cases:
781
+ - Application startup validation
782
+ - Periodic health monitoring
783
+ - Circuit breaker pattern implementation
784
+ - Load balancer health checks
785
+ - Graceful degradation in microservices
786
+ """
787
+ return await self._conn.is_connected()
788
+
789
+ async def pool_stats(self):
790
+ """
791
+ Get comprehensive connection pool statistics and health metrics.
792
+
793
+ This method provides detailed information about the current state of the
794
+ connection pool, including active connections, idle connections, and
795
+ configuration parameters. It's essential for monitoring, debugging, and
796
+ optimizing connection pool performance in production environments.
797
+
798
+ The statistics help identify:
799
+ - Connection pool utilization patterns
800
+ - Potential connection leaks
801
+ - Optimal pool sizing configuration
802
+ - Performance bottlenecks
803
+ - Resource contention issues
804
+
805
+ Returns:
806
+ dict: A dictionary containing pool statistics with the following keys:
807
+
808
+ When connected:
809
+ - 'connections' (int): Total number of connections in the pool
810
+ - 'idle_connections' (int): Number of idle connections available
811
+ - 'active_connections' (int): Number of connections currently in use
812
+ - 'max_size' (int): Maximum allowed connections in the pool
813
+ - 'min_idle' (int): Minimum idle connections maintained
814
+
815
+ When disconnected:
816
+ - 'connected' (bool): False, indicating no active pool
817
+
818
+ Raises:
819
+ ConnectionError: If unable to retrieve pool statistics due to connection issues.
820
+
821
+ Examples:
822
+ # Basic pool monitoring
823
+ >>> stats = await conn.pool_stats()
824
+ >>> if stats.get('connected', True): # Handle disconnected case
825
+ ... print(f"Active connections: {stats['active_connections']}")
826
+ ... print(f"Idle connections: {stats['idle_connections']}")
827
+ ... print(f"Pool utilization: {stats['active_connections']/stats['max_size']*100:.1f}%")
828
+
829
+ # Comprehensive pool monitoring
830
+ >>> async def monitor_pool():
831
+ ... stats = await conn.pool_stats()
832
+ ...
833
+ ... if not stats.get('connected', True):
834
+ ... print("❌ Connection pool is not active")
835
+ ... return
836
+ ...
837
+ ... total = stats['connections']
838
+ ... active = stats['active_connections']
839
+ ... idle = stats['idle_connections']
840
+ ... max_size = stats['max_size']
841
+ ... min_idle = stats['min_idle']
842
+ ...
843
+ ... utilization = (active / max_size) * 100
844
+ ...
845
+ ... print(f"📊 Connection Pool Statistics:")
846
+ ... print(f" Total connections: {total}")
847
+ ... print(f" Active connections: {active}")
848
+ ... print(f" Idle connections: {idle}")
849
+ ... print(f" Max pool size: {max_size}")
850
+ ... print(f" Min idle: {min_idle}")
851
+ ... print(f" Utilization: {utilization:.1f}%")
852
+ ...
853
+ ... # Health assessment
854
+ ... if utilization > 90:
855
+ ... print("âš ī¸ High pool utilization - consider increasing max_size")
856
+ ... elif idle < min_idle:
857
+ ... print("âš ī¸ Low idle connections - pool may be under pressure")
858
+ ... elif utilization < 10 and total > min_idle * 2:
859
+ ... print("â„šī¸ Low utilization - consider reducing max_size")
860
+ ... else:
861
+ ... print("✅ Pool appears healthy")
862
+
863
+ # Pool statistics for alerting
864
+ >>> async def check_pool_health():
865
+ ... stats = await conn.pool_stats()
866
+ ...
867
+ ... if not stats.get('connected', True):
868
+ ... return {"status": "critical", "message": "Pool disconnected"}
869
+ ...
870
+ ... utilization = stats['active_connections'] / stats['max_size']
871
+ ... idle_ratio = stats['idle_connections'] / stats['max_size']
872
+ ...
873
+ ... if utilization > 0.9:
874
+ ... return {
875
+ ... "status": "warning",
876
+ ... "message": f"High utilization: {utilization:.1%}",
877
+ ... "stats": stats
878
+ ... }
879
+ ... elif idle_ratio < 0.1:
880
+ ... return {
881
+ ... "status": "warning",
882
+ ... "message": f"Low idle connections: {stats['idle_connections']}",
883
+ ... "stats": stats
884
+ ... }
885
+ ... else:
886
+ ... return {"status": "healthy", "stats": stats}
887
+
888
+ # Logging pool metrics
889
+ >>> import logging
890
+ >>>
891
+ >>> async def log_pool_metrics():
892
+ ... stats = await conn.pool_stats()
893
+ ... if stats.get('connected', True):
894
+ ... logging.info(
895
+ ... "Pool metrics: active=%d, idle=%d, total=%d, utilization=%.1f%%",
896
+ ... stats['active_connections'],
897
+ ... stats['idle_connections'],
898
+ ... stats['connections'],
899
+ ... (stats['active_connections'] / stats['max_size']) * 100
900
+ ... )
901
+
902
+ Monitoring Best Practices:
903
+ - Monitor pool utilization during peak load periods
904
+ - Set up alerts for utilization > 80% or idle connections < min_idle
905
+ - Track connection acquisition times and pool exhaustion events
906
+ - Use metrics for capacity planning and performance optimization
907
+ - Log pool statistics periodically for historical analysis
908
+
909
+ Performance Impact:
910
+ - This operation has minimal performance overhead
911
+ - Safe to call frequently for monitoring purposes
912
+ - Does not affect active connections or pool operation
913
+ - Recommended for inclusion in health check endpoints
914
+ """
915
+ result_tuple = await self._conn.pool_stats()
916
+
917
+ # Convert tuple to dictionary
918
+ connected, connections, idle_connections, max_size, min_idle = result_tuple
919
+
920
+ if connected:
921
+ return {
922
+ 'connections': connections,
923
+ 'idle_connections': idle_connections,
924
+ 'max_size': max_size,
925
+ 'min_idle': min_idle,
926
+ 'active_connections': connections - idle_connections,
927
+ }
928
+ else:
929
+ return {'connected': False}
930
+
931
+ async def connect(self):
932
+ """Explicitly connect to the database."""
933
+ return await self._conn.connect()
934
+
935
+ async def disconnect(self):
936
+ """Explicitly disconnect from the database."""
937
+ return await self._conn.disconnect()
938
+
939
+ async def __aenter__(self):
940
+ await self._conn.__aenter__()
941
+ return self
942
+
943
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
944
+ await self._conn.__aexit__(exc_type, exc_val, exc_tb)
945
+ return None
946
+
947
+ def __enter__(self):
948
+ return self._conn.__enter__()
949
+
950
+ def __exit__(self, exc_type, exc_val, exc_tb):
951
+ return self._conn.__exit__(exc_type, exc_val, exc_tb)
952
+
953
# Preserve module documentation: mirror the docstring of the native Rust
# connection type if it provides one. NOTE(review): `_RustConnection` is
# defined earlier in this file (not visible here) — presumably the
# PyO3-exported connection class; confirm.
if hasattr(_RustConnection, "__doc__"):
    __doc__ = _RustConnection.__doc__

# Explicit public API surface for `from fastmssql import *`.
__all__ = ["Connection", "PoolConfig", "SslConfig", "FastExecutionResult", "version", "EncryptionLevel", "Parameter", "Parameters"]