fastmssql-0.3.2-cp310-cp310-macosx_11_0_arm64.whl → fastmssql-0.3.3-cp310-cp310-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

fastmssql/__init__.py CHANGED
@@ -331,6 +331,267 @@ class Connection:
         """
         return await self._conn.query(query, parameters)
 
+    async def execute_batch(self, commands):
+        """
+        Execute multiple SQL commands in a single batch operation for optimal performance.
+
+        This method executes multiple INSERT, UPDATE, DELETE, or DDL commands in sequence
+        on a single connection, minimizing network round-trips and connection overhead.
+
+        Use this method for:
+        - Multiple INSERT/UPDATE/DELETE operations
+        - Batch DDL operations (CREATE TABLE, CREATE INDEX, etc.)
+        - Mixed command operations that don't need to return result sets
+        - Any sequence of commands that modify data
+
+        Performance benefits:
+        - Single connection usage reduces pool contention
+        - Reduced network round-trips compared to individual execute() calls
+        - Parameter pre-processing optimization
+        - Efficient memory usage for large batch operations
+
+        Args:
+            commands (list): List of tuples, each containing (command, parameters).
+                Each tuple should be in the format (sql_command, parameter_list).
+                Example: [
+                    ("INSERT INTO users (name, age) VALUES (@P1, @P2)", ["Alice", 25]),
+                    ("UPDATE products SET price = @P1 WHERE id = @P2", [99.99, 123]),
+                    ("DELETE FROM logs WHERE created_date < @P1", [datetime(2023, 1, 1)]),
+                ]
+
+        Returns:
+            list: List of affected row counts for each command, in the same order as input.
+                Each element is an integer representing the number of rows affected by
+                the corresponding command.
+
+        Raises:
+            SqlError: If any SQL command fails due to syntax errors or constraint violations.
+            ConnectionError: If the database connection is lost during execution.
+            TimeoutError: If the batch execution exceeds configured timeouts.
+            ParameterError: If parameter types cannot be converted or are invalid.
+            ValueError: If the commands list format is incorrect.
+
+        Examples:
+            # Basic batch execution
+            >>> commands = [
+            ...     ("INSERT INTO users (name, email) VALUES (@P1, @P2)", ["John", "john@example.com"]),
+            ...     ("INSERT INTO users (name, email) VALUES (@P1, @P2)", ["Jane", "jane@example.com"]),
+            ...     ("UPDATE settings SET value = @P1 WHERE [key] = @P2", ["enabled", "notifications"])
+            ... ]
+            >>> results = await conn.execute_batch(commands)
+            >>> print(f"Affected rows: {results}")  # [1, 1, 1]
+
+            # Mixed operations batch
+            >>> operations = [
+            ...     ("CREATE TABLE temp_data (id INT, value NVARCHAR(50))", None),
+            ...     ("INSERT INTO temp_data VALUES (@P1, @P2)", [1, "test"]),
+            ...     ("UPDATE temp_data SET value = @P1 WHERE id = @P2", ["updated", 1]),
+            ...     ("DROP TABLE temp_data", None)
+            ... ]
+            >>> results = await conn.execute_batch(operations)
+
+            # Bulk data modification
+            >>> user_updates = [
+            ...     ("UPDATE users SET last_login = @P1 WHERE id = @P2", [datetime.now(), user_id])
+            ...     for user_id in [1, 2, 3, 4, 5]
+            ... ]
+            >>> results = await conn.execute_batch(user_updates)
+            >>> total_updated = sum(results)
+        """
+        return await self._conn.execute_batch(commands)
+
+    async def query_batch(self, queries):
+        """
+        Execute multiple SQL queries in a single batch operation for optimal performance.
+
+        This method executes multiple SELECT queries in sequence on a single connection,
+        minimizing network round-trips and connection overhead while returning all result sets.
+
+        Use this method for:
+        - Multiple related SELECT queries
+        - Data analysis operations requiring multiple result sets
+        - Report generation with multiple data sources
+        - Any sequence of queries that return tabular data
+
+        Performance benefits:
+        - Single connection usage reduces pool contention
+        - Reduced network round-trips compared to individual query() calls
+        - Parameter pre-processing optimization
+        - Efficient memory usage for multiple result sets
+
+        Args:
+            queries (list): List of tuples, each containing (query, parameters).
+                Each tuple should be in the format (sql_query, parameter_list).
+                Example: [
+                    ("SELECT * FROM users WHERE age > @P1", [18]),
+                    ("SELECT COUNT(*) as total FROM products", None),
+                    ("SELECT * FROM orders WHERE created_date > @P1", [datetime(2023, 1, 1)]),
+                ]
+
+        Returns:
+            list: List of FastExecutionResult objects for each query, in the same order as input.
+                Each FastExecutionResult provides the same interface as individual query() results:
+                - Async iteration over rows
+                - fetchone(), fetchmany(), fetchall() methods
+                - Row count and column metadata
+
+        Raises:
+            SqlError: If any SQL query fails due to syntax errors or constraint violations.
+            ConnectionError: If the database connection is lost during execution.
+            TimeoutError: If the query execution exceeds configured timeouts.
+            ParameterError: If parameter types cannot be converted or are invalid.
+            ValueError: If the queries list format is incorrect.
+
+        Examples:
+            # Basic batch queries
+            >>> queries = [
+            ...     ("SELECT COUNT(*) as user_count FROM users", None),
+            ...     ("SELECT COUNT(*) as product_count FROM products", None),
+            ...     ("SELECT * FROM users WHERE created_date > @P1", [datetime(2023, 1, 1)])
+            ... ]
+            >>> results = await conn.query_batch(queries)
+            >>>
+            >>> # Process each result
+            >>> user_count = (await results[0].fetchone())['user_count']
+            >>> product_count = (await results[1].fetchone())['product_count']
+            >>> recent_users = await results[2].fetchall()
+
+            # Analytics batch
+            >>> analytics_queries = [
+            ...     ("SELECT CAST(created_date AS DATE) as [date], COUNT(*) as registrations FROM users GROUP BY CAST(created_date AS DATE)", None),
+            ...     ("SELECT category, AVG(price) as avg_price FROM products GROUP BY category", None),
+            ...     ("SELECT status, COUNT(*) as order_count FROM orders GROUP BY status", None)
+            ... ]
+            >>> results = await conn.query_batch(analytics_queries)
+            >>>
+            >>> # Process analytics data
+            >>> for i, result in enumerate(results):
+            ...     print(f"Query {i+1} results:")
+            ...     async for row in result:
+            ...         print(f"  {dict(row)}")
+
+            # Related data batch
+            >>> user_id = 123
+            >>> related_queries = [
+            ...     ("SELECT * FROM users WHERE id = @P1", [user_id]),
+            ...     ("SELECT * FROM orders WHERE user_id = @P1 ORDER BY created_date DESC", [user_id]),
+            ...     ("SELECT * FROM user_preferences WHERE user_id = @P1", [user_id])
+            ... ]
+            >>> results = await conn.query_batch(related_queries)
+            >>> user_data = await results[0].fetchone()
+            >>> user_orders = await results[1].fetchall()
+            >>> user_prefs = await results[2].fetchall()
+        """
+        return await self._conn.query_batch(queries)
+
+    async def bulk_insert(self, table_name, columns, data_rows):
+        """
+        Perform a high-performance bulk insert operation for large datasets.
+
+        This method is optimized for inserting many rows into a single table with
+        maximum performance. It processes data in batches to optimize memory usage
+        and network efficiency while maintaining consistency.
+
+        Use this method for:
+        - Large data imports (CSV, JSON, API data)
+        - ETL operations and data migration
+        - Batch data processing pipelines
+        - Any scenario requiring insertion of many rows
+
+        Performance benefits:
+        - Optimized batch processing with configurable batch sizes
+        - Minimal memory overhead through streaming processing
+        - Single connection usage reduces pool contention
+        - Pre-compiled parameter handling for maximum speed
+        - Automatic transaction batching for consistency
+
+        Args:
+            table_name (str): Name of the target table for insertion.
+                Can be schema-qualified (e.g., "dbo.my_table" or "my_schema.my_table").
+
+            columns (list): List of column names in the order they appear in data_rows.
+                Example: ["name", "email", "age", "created_date"]
+
+            data_rows (list): List of data rows, where each row is a list of values
+                corresponding to the columns. All rows must have the same number of
+                values as there are columns.
+                Example: [
+                    ["Alice", "alice@example.com", 25, datetime(2023, 1, 1)],
+                    ["Bob", "bob@example.com", 30, datetime(2023, 1, 2)],
+                    ["Charlie", "charlie@example.com", 35, datetime(2023, 1, 3)]
+                ]
+
+        Returns:
+            int: Total number of rows successfully inserted.
+
+        Raises:
+            SqlError: If the table doesn't exist, column names are invalid, or constraint violations occur.
+            ConnectionError: If the database connection is lost during execution.
+            TimeoutError: If the bulk insert exceeds configured timeouts.
+            ParameterError: If data types cannot be converted to appropriate SQL types.
+            ValueError: If columns and data_rows have mismatched sizes or an invalid format.
+
+        Examples:
+            # Basic bulk insert
+            >>> columns = ["name", "email", "age"]
+            >>> data = [
+            ...     ["Alice", "alice@example.com", 25],
+            ...     ["Bob", "bob@example.com", 30],
+            ...     ["Charlie", "charlie@example.com", 35]
+            ... ]
+            >>> rows_inserted = await conn.bulk_insert("users", columns, data)
+            >>> print(f"Inserted {rows_inserted} rows")
+
+            # Large dataset import
+            >>> import csv
+            >>> columns = ["product_name", "category", "price", "in_stock"]
+            >>> data_rows = []
+            >>>
+            >>> with open('products.csv', 'r') as file:
+            ...     reader = csv.reader(file)
+            ...     next(reader)  # Skip header
+            ...     for row in reader:
+            ...         data_rows.append([row[0], row[1], float(row[2]), bool(int(row[3]))])
+            >>>
+            >>> total_inserted = await conn.bulk_insert("products", columns, data_rows)
+            >>> print(f"Imported {total_inserted} products from CSV")
+
+            # Generated data bulk insert
+            >>> from datetime import datetime, timedelta
+            >>> import random
+            >>>
+            >>> columns = ["user_id", "activity", "timestamp", "value"]
+            >>> activities = ["login", "logout", "view_page", "click_button", "purchase"]
+            >>>
+            >>> # Generate 10,000 activity records
+            >>> data_rows = []
+            >>> for i in range(10000):
+            ...     user_id = random.randint(1, 1000)
+            ...     activity = random.choice(activities)
+            ...     timestamp = datetime.now() - timedelta(days=random.randint(0, 30))
+            ...     value = random.randint(1, 100)
+            ...     data_rows.append([user_id, activity, timestamp, value])
+            >>>
+            >>> rows_inserted = await conn.bulk_insert("user_activities", columns, data_rows)
+            >>> print(f"Inserted {rows_inserted} activity records")
+
+            # Data transformation during bulk insert
+            >>> raw_data = fetch_api_data()  # Some external data source
+            >>> columns = ["name", "email", "normalized_phone", "registration_date"]
+            >>>
+            >>> processed_data = []
+            >>> for record in raw_data:
+            ...     processed_data.append([
+            ...         record['full_name'].strip().title(),
+            ...         record['email'].lower(),
+            ...         normalize_phone(record['phone']),
+            ...         datetime.fromisoformat(record['reg_date'])
+            ...     ])
+            >>>
+            >>> result = await conn.bulk_insert("customers", columns, processed_data)
+        """
+        return await self._conn.bulk_insert(table_name, columns, data_rows)
+
     async def execute(self, query, parameters=None):
         """
         Execute a SQL command that doesn't return rows (INSERT/UPDATE/DELETE/DDL) asynchronously.
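
Taken together, the three additions cover bulk loading, batched reads, and batched writes. Below is a minimal sketch of how they might be combined, based only on the signatures and docstrings shown in the diff above; the connection string, table name, and chunk size are illustrative, not part of the package:

```python
import asyncio
from datetime import datetime

from fastmssql import Connection

CHUNK_SIZE = 5_000  # illustrative; tune to row width and memory budget

async def load_events(conn, columns, rows):
    """Insert a large dataset in caller-side chunks via bulk_insert()."""
    total = 0
    for start in range(0, len(rows), CHUNK_SIZE):
        total += await conn.bulk_insert("events", columns, rows[start:start + CHUNK_SIZE])
    return total

async def main():
    rows = [[i % 1000, "login", datetime.now()] for i in range(20_000)]
    async with Connection("Server=.;Database=MyDB;User Id=sa;Password=StrongPwd;") as conn:
        inserted = await load_events(conn, ["user_id", "activity", "timestamp"], rows)

        # Verify the load in one round-trip instead of two query() calls.
        results = await conn.query_batch([
            ("SELECT COUNT(*) AS n FROM events", None),
            ("SELECT MAX(timestamp) AS latest FROM events", None),
        ])
        count_row = await results[0].fetchone()
        latest_row = await results[1].fetchone()
        print(f"Inserted {inserted}; table has {count_row['n']} rows, latest {latest_row['latest']}")

asyncio.run(main())
```

The docstring says batching is handled internally, so the caller-side chunking is optional; it simply bounds peak memory when rows are produced incrementally rather than held in one list.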
fastmssql/fastmssql.cpython-310-darwin.so CHANGED (binary file, diff not shown)
fastmssql-0.3.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fastmssql
-Version: 0.3.2
+Version: 0.3.3
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
@@ -67,11 +67,12 @@ Great for data ingestion, bulk inserts, and large-scale query workloads.
 - Strong typing: fast conversions for common SQL Server types
 - Thread‑safe: safe to use in concurrent apps
 - Cross‑platform: Windows, macOS, Linux
+- Batch operations: high-performance bulk inserts and batch query execution
 
 
 ## Key API methods
 
-Two focused methods cover most use cases:
+Core methods for individual operations:
 
 - `query()` — SELECT statements that return rows
 - `execute()` — INSERT/UPDATE/DELETE/DDL that return affected row count
@@ -209,6 +210,52 @@ asyncio.run(main())
 Parameters use positional placeholders: `@P1`, `@P2`, ... Provide values as a list in the same order.
 
 
+### Batch operations
+
+For high-throughput scenarios, use batch methods to reduce network round-trips:
+
+```python
+import asyncio
+from fastmssql import Connection
+
+async def main():
+    async with Connection("Server=.;Database=MyDB;User Id=sa;Password=StrongPwd;") as conn:
+        # Bulk insert for fast data loading
+        columns = ["name", "email", "age"]
+        data_rows = [
+            ["Alice Johnson", "alice@example.com", 28],
+            ["Bob Smith", "bob@example.com", 32],
+            ["Carol Davis", "carol@example.com", 25]
+        ]
+
+        rows_inserted = await conn.bulk_insert("users", columns, data_rows)
+        print(f"Bulk inserted {rows_inserted} rows")
+
+        # Batch queries for multiple SELECT operations
+        queries = [
+            ("SELECT COUNT(*) as total FROM users WHERE age > @P1", [25]),
+            ("SELECT AVG(age) as avg_age FROM users", None),
+            ("SELECT name FROM users WHERE email LIKE @P1", ["%@example.com"])
+        ]
+
+        results = await conn.query_batch(queries)
+        print(f"Total users over 25: {results[0].rows()[0]['total']}")
+        print(f"Average age: {results[1].rows()[0]['avg_age']:.1f}")
+        print(f"Example.com users: {len(results[2].rows())}")
+
+        # Batch commands for multiple operations
+        commands = [
+            ("UPDATE users SET last_login = GETDATE() WHERE name = @P1", ["Alice Johnson"]),
+            ("INSERT INTO user_logs (action, user_name) VALUES (@P1, @P2)", ["login", "Alice Johnson"])
+        ]
+
+        affected_counts = await conn.execute_batch(commands)
+        print(f"Updated {affected_counts[0]} users, inserted {affected_counts[1]} logs")
+
+asyncio.run(main())
+```
+
+
 ### Connection strings
 
 ```python
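
The README example above shows only the happy path. The docstrings list the exceptions each batch method can raise but do not state whether earlier commands in a failed batch remain applied, so a cautious caller can fall back to per-command execution to isolate a failure. A sketch under that assumption (exceptions are caught generically because the diff does not show where SqlError and the other exception classes are importable from):

```python
import asyncio
from fastmssql import Connection

async def apply_batch(conn, commands):
    """Run a batch; on failure, replay one-by-one to isolate the failing command."""
    try:
        return await conn.execute_batch(commands)
    except Exception:
        counts = []
        for sql, params in commands:
            try:
                counts.append(await conn.execute(sql, params))
            except Exception as exc:
                raise RuntimeError(f"Command failed: {sql!r}") from exc
        return counts

async def main():
    async with Connection("Server=.;Database=MyDB;User Id=sa;Password=StrongPwd;") as conn:
        counts = await apply_batch(conn, [
            ("UPDATE users SET active = 1 WHERE id = @P1", [42]),
            ("DELETE FROM sessions WHERE user_id = @P1", [42]),
        ])
        print(f"Affected rows per command: {counts}")

asyncio.run(main())
```

The replay step assumes the commands are idempotent or independently safe to re-run; if they are not, keep the batch to commands that can be retried safely.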
fastmssql-0.3.3.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+fastmssql-0.3.3.dist-info/RECORD,,
+fastmssql-0.3.3.dist-info/WHEEL,sha256=jZxrHC-ILb5xRoXJp_MecAw4BxeAsi-L_ZpkO8fG8As,132
+fastmssql-0.3.3.dist-info/METADATA,sha256=A3lMTmqMqrsS2KsYpcnXnw6SveDs7fh1Ybps8T9pfg4,11986
+fastmssql-0.3.3.dist-info/licenses/LICENSE,sha256=OHj2nKice3tSk2Us200EWXDpwDKtAzeOu4NF4rwg5gk,33858
+fastmssql/__init__.py,sha256=H2sCgkVlgRo3oIbejLqnHUIsFCqcbJIc20TJ4CCPkQo,43543
+fastmssql/fastmssql.cpython-310-darwin.so,sha256=2Ara7YjRl7yr_Wz2M477PAKYiyiVUHYNfPFOaFVQBWc,2068832

fastmssql-0.3.2.dist-info/RECORD DELETED
@@ -1,6 +0,0 @@
-fastmssql-0.3.2.dist-info/RECORD,,
-fastmssql-0.3.2.dist-info/WHEEL,sha256=jZxrHC-ILb5xRoXJp_MecAw4BxeAsi-L_ZpkO8fG8As,132
-fastmssql-0.3.2.dist-info/METADATA,sha256=-QFPw7BapAZLIykvsOkYX90ubR8PoAgl3ARZc_TNUaU,10182
-fastmssql-0.3.2.dist-info/licenses/LICENSE,sha256=OHj2nKice3tSk2Us200EWXDpwDKtAzeOu4NF4rwg5gk,33858
-fastmssql/__init__.py,sha256=pFvT4Iki4PdpHWl3APWVeXccWxakueCkU57jbEKcIDA,30407
-fastmssql/fastmssql.cpython-310-darwin.so,sha256=1U6WqxXkEsEw9Xrwx1yJViSFQerNyFSYBji-DltDtWI,1985904
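
For reference, each RECORD line follows the standard wheel manifest format: file path, `sha256=` plus the unpadded urlsafe-base64 digest, and size in bytes (the RECORD file lists itself with both fields empty). A generic integrity check of an unpacked wheel against these entries, not part of fastmssql itself, might look like this:

```python
import base64
import csv
import hashlib
from pathlib import Path

def verify_record(record_path: str) -> None:
    """Compare each file's sha256 digest and size against its RECORD entry."""
    with open(record_path, newline="") as f:
        for path, hash_spec, size in csv.reader(f):
            if not hash_spec:  # RECORD lists itself without hash or size
                continue
            algo, _, expected = hash_spec.partition("=")
            data = Path(path).read_bytes()
            digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest()).rstrip(b"=").decode()
            ok = digest == expected and len(data) == int(size)
            print(f"{'OK      ' if ok else 'MISMATCH'} {path}")

# Run from the directory where the wheel was unpacked.
verify_record("fastmssql-0.3.3.dist-info/RECORD")
```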