dtSpark 1.1.0a3__py3-none-any.whl → 1.1.0a7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. dtSpark/_version.txt +1 -1
  2. dtSpark/aws/authentication.py +1 -1
  3. dtSpark/aws/bedrock.py +238 -239
  4. dtSpark/aws/costs.py +9 -5
  5. dtSpark/aws/pricing.py +25 -21
  6. dtSpark/cli_interface.py +77 -68
  7. dtSpark/conversation_manager.py +54 -47
  8. dtSpark/core/application.py +114 -91
  9. dtSpark/core/context_compaction.py +241 -226
  10. dtSpark/daemon/__init__.py +36 -22
  11. dtSpark/daemon/action_monitor.py +46 -17
  12. dtSpark/daemon/daemon_app.py +126 -104
  13. dtSpark/daemon/daemon_manager.py +59 -23
  14. dtSpark/daemon/pid_file.py +3 -2
  15. dtSpark/database/autonomous_actions.py +3 -0
  16. dtSpark/database/credential_prompt.py +52 -54
  17. dtSpark/files/manager.py +6 -12
  18. dtSpark/limits/__init__.py +1 -1
  19. dtSpark/limits/tokens.py +2 -2
  20. dtSpark/llm/anthropic_direct.py +246 -141
  21. dtSpark/llm/ollama.py +3 -1
  22. dtSpark/mcp_integration/manager.py +4 -4
  23. dtSpark/mcp_integration/tool_selector.py +83 -77
  24. dtSpark/resources/config.yaml.template +11 -0
  25. dtSpark/safety/patterns.py +45 -46
  26. dtSpark/safety/prompt_inspector.py +8 -1
  27. dtSpark/scheduler/creation_tools.py +273 -181
  28. dtSpark/scheduler/executor.py +503 -221
  29. dtSpark/tools/builtin.py +70 -53
  30. dtSpark/web/endpoints/autonomous_actions.py +12 -9
  31. dtSpark/web/endpoints/chat.py +8 -6
  32. dtSpark/web/endpoints/conversations.py +18 -9
  33. dtSpark/web/endpoints/main_menu.py +132 -105
  34. dtSpark/web/endpoints/streaming.py +2 -2
  35. dtSpark/web/server.py +70 -5
  36. dtSpark/web/ssl_utils.py +3 -3
  37. dtSpark/web/static/css/dark-theme.css +8 -29
  38. dtSpark/web/static/js/chat.js +6 -8
  39. dtSpark/web/static/js/main.js +8 -8
  40. dtSpark/web/static/js/sse-client.js +130 -122
  41. dtSpark/web/templates/actions.html +5 -5
  42. dtSpark/web/templates/base.html +15 -0
  43. dtSpark/web/templates/chat.html +10 -10
  44. dtSpark/web/templates/conversations.html +6 -2
  45. dtSpark/web/templates/goodbye.html +2 -2
  46. dtSpark/web/templates/main_menu.html +19 -17
  47. dtSpark/web/web_interface.py +2 -2
  48. {dtspark-1.1.0a3.dist-info → dtspark-1.1.0a7.dist-info}/METADATA +9 -2
  49. dtspark-1.1.0a7.dist-info/RECORD +96 -0
  50. dtspark-1.1.0a3.dist-info/RECORD +0 -96
  51. {dtspark-1.1.0a3.dist-info → dtspark-1.1.0a7.dist-info}/WHEEL +0 -0
  52. {dtspark-1.1.0a3.dist-info → dtspark-1.1.0a7.dist-info}/entry_points.txt +0 -0
  53. {dtspark-1.1.0a3.dist-info → dtspark-1.1.0a7.dist-info}/licenses/LICENSE +0 -0
  54. {dtspark-1.1.0a3.dist-info → dtspark-1.1.0a7.dist-info}/top_level.txt +0 -0
dtSpark/tools/builtin.py CHANGED
@@ -16,6 +16,23 @@ from datetime import datetime
  from typing import Dict, List, Any, Optional
  from zoneinfo import ZoneInfo, available_timezones

+ # Common error/description string constants (SonarCloud S1192)
+ _ERR_FS_NOT_CONFIGURED = "Filesystem tools not configured"
+ _ERR_FILE_PATH_REQUIRED = "File path is required"
+ _ERR_DOC_NOT_CONFIGURED = "Document tools not configured"
+ _ERR_DOC_NOT_ENABLED = "Document tools are not enabled"
+ _ERR_WRITE_MODE_REQUIRED = "Write operations require access_mode: read_write"
+ _ERR_OUTPUT_PATH_REQUIRED = "Output file path is required"
+ _ERR_ARCHIVE_NOT_CONFIGURED = "Archive tools not configured"
+ _ERR_ARCHIVE_NOT_ENABLED = "Archive tools are not enabled"
+ _ERR_ARCHIVE_PATH_REQUIRED = "Archive path is required"
+ _DESC_ARCHIVE_PATH = "Path to the archive file"
+ _TAR_GZ = 'tar.gz'
+ _TAR_BZ2 = 'tar.bz2'
+ _TAR_BZ2_MODE = 'r:bz2'
+ _TAR_OPEN_MODES = {_TAR_GZ: 'r:gz', _TAR_BZ2: _TAR_BZ2_MODE}
+
+

  def get_builtin_tools(config: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]:
  """
@@ -459,7 +476,7 @@ def _execute_list_files_recursive(tool_input: Dict[str, Any],
  Dictionary with success status and file listing
  """
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -539,7 +556,7 @@ def _execute_search_files(tool_input: Dict[str, Any],
  Dictionary with success status and search results
  """
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -609,7 +626,7 @@ def _execute_read_file_text(tool_input: Dict[str, Any],
  Dictionary with success status and file content
  """
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -618,7 +635,7 @@ def _execute_read_file_text(tool_input: Dict[str, Any],
  file_path = tool_input.get('path')

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  # Validate path
  validation = _validate_path(file_path, allowed_path)
@@ -673,7 +690,7 @@ def _execute_read_file_binary(tool_input: Dict[str, Any],
  Dictionary with success status and base64-encoded content
  """
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -683,7 +700,7 @@ def _execute_read_file_binary(tool_input: Dict[str, Any],
  max_size_mb = tool_input.get('max_size_mb', 10)

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  # Validate path
  validation = _validate_path(file_path, allowed_path)
@@ -748,7 +765,7 @@ def _execute_write_file(tool_input: Dict[str, Any],

  if not config.get('embedded_tools'):
  logging.warning("write_file failed: embedded_tools not in config")
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -773,7 +790,7 @@ def _execute_write_file(tool_input: Dict[str, Any],

  if not file_path:
  logging.warning("write_file failed: no file path provided")
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  if content is None:
  logging.warning("write_file failed: no content provided")
@@ -830,7 +847,7 @@ def _execute_create_directories(tool_input: Dict[str, Any],
  Dictionary with success status
  """
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Filesystem tools not configured"}
+ return {"success": False, "error": _ERR_FS_NOT_CONFIGURED}

  fs_config = config.get('embedded_tools', {}).get('filesystem', {})
  allowed_path = fs_config.get('allowed_path', '.')
@@ -1145,17 +1162,17 @@ def _execute_get_file_info(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the get_file_info tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  allowed_path = doc_config.get('allowed_path', '.')
  file_path = tool_input.get('path')

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1220,11 +1237,11 @@ def _execute_read_word_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the read_word_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  allowed_path = doc_config.get('allowed_path', '.')
  max_size_mb = doc_config.get('max_file_size_mb', 50)
@@ -1234,7 +1251,7 @@ def _execute_read_word_document(tool_input: Dict[str, Any],
  include_headers_footers = tool_input.get('include_headers_footers', False)

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1307,11 +1324,11 @@ def _execute_read_excel_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the read_excel_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  allowed_path = doc_config.get('allowed_path', '.')
  max_size_mb = doc_config.get('max_file_size_mb', 50)
@@ -1323,7 +1340,7 @@ def _execute_read_excel_document(tool_input: Dict[str, Any],
  max_rows = tool_input.get('max_rows', 0) or default_max_rows

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1395,11 +1412,11 @@ def _execute_read_powerpoint_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the read_powerpoint_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  allowed_path = doc_config.get('allowed_path', '.')
  max_size_mb = doc_config.get('max_file_size_mb', 50)
@@ -1408,7 +1425,7 @@ def _execute_read_powerpoint_document(tool_input: Dict[str, Any],
  include_notes = tool_input.get('include_notes', True)

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1478,11 +1495,11 @@ def _execute_read_pdf_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the read_pdf_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  allowed_path = doc_config.get('allowed_path', '.')
  max_size_mb = doc_config.get('max_file_size_mb', 50)
@@ -1493,7 +1510,7 @@ def _execute_read_pdf_document(tool_input: Dict[str, Any],
  include_metadata = tool_input.get('include_metadata', True)

  if not file_path:
- return {"success": False, "error": "File path is required"}
+ return {"success": False, "error": _ERR_FILE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1560,14 +1577,14 @@ def _execute_create_word_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the create_word_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  if doc_config.get('access_mode', 'read') != 'read_write':
- return {"success": False, "error": "Write operations require access_mode: read_write"}
+ return {"success": False, "error": _ERR_WRITE_MODE_REQUIRED}

  allowed_path = doc_config.get('allowed_path', '.')
  templates_path = doc_config.get('creation', {}).get('templates_path')
@@ -1578,7 +1595,7 @@ def _execute_create_word_document(tool_input: Dict[str, Any],
  placeholders = tool_input.get('placeholders', {})

  if not file_path:
- return {"success": False, "error": "Output file path is required"}
+ return {"success": False, "error": _ERR_OUTPUT_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -1680,14 +1697,14 @@ def _execute_create_excel_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the create_excel_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  if doc_config.get('access_mode', 'read') != 'read_write':
- return {"success": False, "error": "Write operations require access_mode: read_write"}
+ return {"success": False, "error": _ERR_WRITE_MODE_REQUIRED}

  allowed_path = doc_config.get('allowed_path', '.')

@@ -1695,7 +1712,7 @@ def _execute_create_excel_document(tool_input: Dict[str, Any],
  sheets = tool_input.get('sheets', [])

  if not file_path:
- return {"success": False, "error": "Output file path is required"}
+ return {"success": False, "error": _ERR_OUTPUT_PATH_REQUIRED}

  if not sheets:
  return {"success": False, "error": "At least one sheet is required"}
@@ -1758,14 +1775,14 @@ def _execute_create_powerpoint_document(tool_input: Dict[str, Any],
  config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
  """Execute the create_powerpoint_document tool."""
  if not config.get('embedded_tools'):
- return {"success": False, "error": "Document tools not configured"}
+ return {"success": False, "error": _ERR_DOC_NOT_CONFIGURED}

  doc_config = config.get('embedded_tools', {}).get('documents', {})
  if not doc_config.get('enabled', False):
- return {"success": False, "error": "Document tools are not enabled"}
+ return {"success": False, "error": _ERR_DOC_NOT_ENABLED}

  if doc_config.get('access_mode', 'read') != 'read_write':
- return {"success": False, "error": "Write operations require access_mode: read_write"}
+ return {"success": False, "error": _ERR_WRITE_MODE_REQUIRED}

  allowed_path = doc_config.get('allowed_path', '.')
  templates_path = doc_config.get('creation', {}).get('templates_path')
@@ -1776,7 +1793,7 @@ def _execute_create_powerpoint_document(tool_input: Dict[str, Any],
  placeholders = tool_input.get('placeholders', {})

  if not file_path:
- return {"success": False, "error": "Output file path is required"}
+ return {"success": False, "error": _ERR_OUTPUT_PATH_REQUIRED}

  if not slides_data and not template_path:
  return {"success": False, "error": "Either slides or template_path is required"}
@@ -1914,7 +1931,7 @@ def _get_archive_tools(archive_config: Dict[str, Any]) -> List[Dict[str, Any]]:
  "properties": {
  "path": {
  "type": "string",
- "description": "Path to the archive file"
+ "description": _DESC_ARCHIVE_PATH
  }
  },
  "required": ["path"]
@@ -1929,7 +1946,7 @@ def _get_archive_tools(archive_config: Dict[str, Any]) -> List[Dict[str, Any]]:
  "properties": {
  "archive_path": {
  "type": "string",
- "description": "Path to the archive file"
+ "description": _DESC_ARCHIVE_PATH
  },
  "file_path": {
  "type": "string",
@@ -1963,7 +1980,7 @@ def _get_archive_tools(archive_config: Dict[str, Any]) -> List[Dict[str, Any]]:
  "properties": {
  "archive_path": {
  "type": "string",
- "description": "Path to the archive file"
+ "description": _DESC_ARCHIVE_PATH
  },
  "destination": {
  "type": "string",
@@ -1997,9 +2014,9 @@ def _get_archive_type(file_path: Path) -> Optional[str]:
  elif suffix == '.tar':
  return 'tar'
  elif name.endswith('.tar.gz') or suffix == '.tgz':
- return 'tar.gz'
+ return _TAR_GZ
  elif name.endswith('.tar.bz2'):
- return 'tar.bz2'
+ return _TAR_BZ2
  return None


@@ -2010,11 +2027,11 @@ def _execute_list_archive_contents(tool_input: Dict[str, Any],
  import tarfile

  if not config.get('embedded_tools'):
- return {"success": False, "error": "Archive tools not configured"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_CONFIGURED}

  archive_config = config.get('embedded_tools', {}).get('archives', {})
  if not archive_config.get('enabled', False):
- return {"success": False, "error": "Archive tools are not enabled"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_ENABLED}

  allowed_path = archive_config.get('allowed_path', '.')
  max_size_mb = archive_config.get('max_file_size_mb', 100)
@@ -2023,7 +2040,7 @@ def _execute_list_archive_contents(tool_input: Dict[str, Any],
  file_path = tool_input.get('path')

  if not file_path:
- return {"success": False, "error": "Archive path is required"}
+ return {"success": False, "error": _ERR_ARCHIVE_PATH_REQUIRED}

  validation = _validate_path(file_path, allowed_path)
  if not validation['valid']:
@@ -2055,7 +2072,7 @@ def _execute_list_archive_contents(tool_input: Dict[str, Any],
  "modified": datetime(*info.date_time).isoformat() if info.date_time else None
  })
  else:
- mode = 'r:gz' if archive_type == 'tar.gz' else 'r:bz2' if archive_type == 'tar.bz2' else 'r'
+ mode = _TAR_OPEN_MODES.get(archive_type, 'r')
  with tarfile.open(str(full_path), mode) as tf:
  count = 0
  for member in tf:
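Note: the chained conditional that picked a tarfile open mode is replaced here, and in two later hunks, by a dictionary lookup with a default. A small sketch of the equivalence, using the constants defined at the top of builtin.py (the _TAR_BZ2_MODE indirection is collapsed for brevity):

    _TAR_GZ = 'tar.gz'
    _TAR_BZ2 = 'tar.bz2'
    _TAR_OPEN_MODES = {_TAR_GZ: 'r:gz', _TAR_BZ2: 'r:bz2'}

    # dict.get with a default reproduces the old nested conditional:
    assert _TAR_OPEN_MODES.get('tar.gz', 'r') == 'r:gz'
    assert _TAR_OPEN_MODES.get('tar.bz2', 'r') == 'r:bz2'
    assert _TAR_OPEN_MODES.get('tar', 'r') == 'r'   # plain .tar falls back to 'r'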
@@ -2093,11 +2110,11 @@ def _execute_read_archive_file(tool_input: Dict[str, Any],
  import tarfile

  if not config.get('embedded_tools'):
- return {"success": False, "error": "Archive tools not configured"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_CONFIGURED}

  archive_config = config.get('embedded_tools', {}).get('archives', {})
  if not archive_config.get('enabled', False):
- return {"success": False, "error": "Archive tools are not enabled"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_ENABLED}

  allowed_path = archive_config.get('allowed_path', '.')
  max_size_mb = archive_config.get('max_file_size_mb', 100)
@@ -2108,7 +2125,7 @@ def _execute_read_archive_file(tool_input: Dict[str, Any],
  as_binary = tool_input.get('as_binary', False)

  if not archive_path:
- return {"success": False, "error": "Archive path is required"}
+ return {"success": False, "error": _ERR_ARCHIVE_PATH_REQUIRED}

  if not file_path:
  return {"success": False, "error": "File path within archive is required"}
@@ -2138,7 +2155,7 @@ def _execute_read_archive_file(tool_input: Dict[str, Any],
  return {"success": False, "error": f"File not found in archive: {file_path}"}
  content = zf.read(file_path)
  else:
- mode = 'r:gz' if archive_type == 'tar.gz' else 'r:bz2' if archive_type == 'tar.bz2' else 'r'
+ mode = _TAR_OPEN_MODES.get(archive_type, 'r')
  with tarfile.open(str(full_path), mode) as tf:
  try:
  member = tf.getmember(file_path)
@@ -2195,11 +2212,11 @@ def _execute_extract_archive(tool_input: Dict[str, Any],
  import tarfile

  if not config.get('embedded_tools'):
- return {"success": False, "error": "Archive tools not configured"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_CONFIGURED}

  archive_config = config.get('embedded_tools', {}).get('archives', {})
  if not archive_config.get('enabled', False):
- return {"success": False, "error": "Archive tools are not enabled"}
+ return {"success": False, "error": _ERR_ARCHIVE_NOT_ENABLED}

  if archive_config.get('access_mode', 'read') != 'read_write':
  return {"success": False, "error": "Extract operations require access_mode: read_write"}
@@ -2213,7 +2230,7 @@ def _execute_extract_archive(tool_input: Dict[str, Any],
  overwrite = tool_input.get('overwrite', False)

  if not archive_path:
- return {"success": False, "error": "Archive path is required"}
+ return {"success": False, "error": _ERR_ARCHIVE_PATH_REQUIRED}

  if not destination:
  return {"success": False, "error": "Destination directory is required"}
@@ -2259,7 +2276,7 @@ def _execute_extract_archive(tool_input: Dict[str, Any],
  zf.extract(member, str(full_dest_path))
  extracted_files.append(member)
  else:
- mode = 'r:gz' if archive_type == 'tar.gz' else 'r:bz2' if archive_type == 'tar.bz2' else 'r'
+ mode = _TAR_OPEN_MODES.get(archive_type, 'r')
  with tarfile.open(str(full_archive_path), mode) as tf:
  if files_to_extract:
  members = [tf.getmember(f) for f in files_to_extract if f in tf.getnames()]
dtSpark/web/endpoints/autonomous_actions.py CHANGED
@@ -20,6 +20,9 @@ from pydantic import BaseModel, Field

  from ..dependencies import get_current_session

+ # Error message constants
+ _ERR_ACTION_NOT_FOUND = "Action not found"
+

  logger = logging.getLogger(__name__)

@@ -35,10 +38,10 @@ def parse_datetime(dt_value):
  if isinstance(dt_value, str):
  try:
  return datetime.fromisoformat(dt_value.replace('Z', '+00:00'))
- except:
+ except (ValueError, TypeError):
  try:
  return datetime.strptime(dt_value, '%Y-%m-%d %H:%M:%S.%f')
- except:
+ except (ValueError, TypeError):
  return None
  return None

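Note: both bare except: clauses in parse_datetime are narrowed to the exceptions the parsing calls actually raise; a bare except would also swallow KeyboardInterrupt and SystemExit. A sketch of the narrowed control flow, reconstructed only from the lines shown in this hunk:

    from datetime import datetime

    def parse_datetime(dt_value):
        if isinstance(dt_value, str):
            try:
                # ISO-8601 first, accepting a trailing 'Z' for UTC
                return datetime.fromisoformat(dt_value.replace('Z', '+00:00'))
            except (ValueError, TypeError):
                try:
                    # Fall back to the SQLite-style timestamp format
                    return datetime.strptime(dt_value, '%Y-%m-%d %H:%M:%S.%f')
                except (ValueError, TypeError):
                    return None
        return None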
@@ -213,7 +216,7 @@ async def get_action(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  tool_permissions = database.get_action_tool_permissions(action_id)

@@ -326,7 +329,7 @@ async def update_action(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  # Build updates dict
  updates = {}
@@ -390,7 +393,7 @@ async def delete_action(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  # Unschedule the action
  if hasattr(app_instance, 'action_scheduler') and app_instance.action_scheduler:
@@ -432,7 +435,7 @@ async def enable_action(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  database.enable_action(action_id)

@@ -479,7 +482,7 @@ async def disable_action(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  database.disable_action(action_id)

@@ -520,7 +523,7 @@ async def run_action_now(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  # Check if action is currently locked by another process (e.g., daemon)
  from dtSpark.database.autonomous_actions import get_action_lock_info
@@ -583,7 +586,7 @@ async def list_action_runs(

  action = database.get_action(action_id)
  if not action:
- raise HTTPException(status_code=404, detail="Action not found")
+ raise HTTPException(status_code=404, detail=_ERR_ACTION_NOT_FOUND)

  runs = database.get_action_runs(action_id, limit=limit, offset=offset)

dtSpark/web/endpoints/chat.py CHANGED
@@ -10,9 +10,11 @@ Provides REST API for chat operations:

  """

+ import asyncio
  import logging
  import tempfile
  import os
+ from pathlib import Path
  from typing import Optional, List
  from datetime import datetime

@@ -319,15 +321,15 @@ async def command_attach(
  # Create temporary file
  suffix = os.path.splitext(upload_file.filename)[1]
  temp_fd, temp_path = tempfile.mkstemp(suffix=suffix)
+ os.close(temp_fd)

- # Write uploaded content to temp file
- with os.fdopen(temp_fd, 'wb') as f:
- content = await upload_file.read()
- f.write(content)
+ # Write uploaded content to temp file asynchronously
+ content = await upload_file.read()
+ await asyncio.to_thread(Path(temp_path).write_bytes, content)

  temp_files.append(temp_path)
  attached_filenames.append(upload_file.filename)
- logger.info(f"Saved uploaded file {upload_file.filename} to {temp_path}")
+ logger.info("Saved uploaded file to %s", temp_path)

  # Attach files using conversation manager
  if temp_files:
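Note: the upload handler now closes the descriptor returned by mkstemp straight away and moves the blocking write off the event loop. A minimal sketch of the pattern (assumes Python 3.9+ for asyncio.to_thread; save_upload is an illustrative name, not a function in this package):

    import asyncio
    import os
    import tempfile
    from pathlib import Path

    async def save_upload(content: bytes, suffix: str) -> str:
        # mkstemp returns an open fd; close it because Path.write_bytes reopens the file.
        temp_fd, temp_path = tempfile.mkstemp(suffix=suffix)
        os.close(temp_fd)
        # write_bytes is blocking I/O, so run it in a worker thread to keep
        # the async endpoint responsive while large uploads are written.
        await asyncio.to_thread(Path(temp_path).write_bytes, content)
        return temp_path

The same change is repeated in conversations.py below.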
@@ -366,7 +368,7 @@ async def command_attach(
  try:
  if os.path.exists(temp_path):
  os.unlink(temp_path)
- except:
+ except OSError:
  pass

  except Exception as e:
dtSpark/web/endpoints/conversations.py CHANGED
@@ -10,9 +10,11 @@ Provides REST API for conversation operations:

  """

+ import asyncio
  import logging
  import tempfile
  import os
+ from pathlib import Path
  from typing import Optional, List
  from datetime import datetime

@@ -37,7 +39,7 @@ def parse_datetime(dt_value):
  # SQLite returns timestamps as strings
  try:
  return datetime.fromisoformat(dt_value.replace('Z', '+00:00'))
- except:
+ except (ValueError, TypeError):
  return datetime.strptime(dt_value, '%Y-%m-%d %H:%M:%S.%f')
  return None

@@ -175,6 +177,13 @@ async def create_conversation(
  ConversationDetail for the created conversation
  """
  try:
+ # Check if new conversations are allowed
+ if not getattr(request.app.state, 'new_conversations_allowed', True):
+ raise HTTPException(
+ status_code=403,
+ detail="Creating new conversations is disabled by configuration"
+ )
+
  app_instance = request.app.state.app_instance
  database = app_instance.database
  conversation_manager = app_instance.conversation_manager
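Note: the new 403 guard reads a flag from application state with getattr and a permissive default, so deployments that never set the flag keep the previous behaviour. A sketch of the check in isolation (ensure_new_conversations_allowed is an illustrative helper, not part of the package; how the flag is populated, presumably from the new config.yaml.template options via web/server.py, is not shown in this hunk):

    from fastapi import HTTPException, Request

    def ensure_new_conversations_allowed(request: Request) -> None:
        # Defaults to True when the server never sets the flag on app.state.
        if not getattr(request.app.state, 'new_conversations_allowed', True):
            raise HTTPException(
                status_code=403,
                detail="Creating new conversations is disabled by configuration"
            )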
@@ -220,7 +229,7 @@
  suffix = os.path.splitext(upload_file.filename)[1]
  if not suffix:
  upload_errors.append(f"File '{upload_file.filename}' has no file extension")
- logger.warning(f"File '{upload_file.filename}' uploaded without extension")
+ logger.warning("File uploaded without extension")
  continue

  # Check if extension is supported (using FileManager's validation)
@@ -230,19 +239,19 @@
  FileManager.SUPPORTED_DOCUMENT_FILES |
  FileManager.SUPPORTED_IMAGE_FILES):
  upload_errors.append(f"File type '{suffix}' is not supported for '{upload_file.filename}'")
- logger.warning(f"Unsupported file type '{suffix}' for file '{upload_file.filename}'")
+ logger.warning("Unsupported file type uploaded")
  continue

  # Create temporary file with proper extension
  temp_fd, temp_path = tempfile.mkstemp(suffix=suffix)
+ os.close(temp_fd)

- # Write uploaded content to temp file
- with os.fdopen(temp_fd, 'wb') as f:
- content = await upload_file.read()
- f.write(content)
+ # Write uploaded content to temp file asynchronously
+ content = await upload_file.read()
+ await asyncio.to_thread(Path(temp_path).write_bytes, content)

  temp_files.append(temp_path)
- logger.info(f"Saved uploaded file {upload_file.filename} to {temp_path}")
+ logger.info("Saved uploaded file to %s", temp_path)

  # Attach files using conversation manager
  if temp_files:
@@ -265,7 +274,7 @@ async def create_conversation(
  try:
  if os.path.exists(temp_path):
  os.unlink(temp_path)
- except:
+ except OSError:
  pass

  # Get the created conversation details