autoforge-ai 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84) hide show
  1. package/.claude/commands/check-code.md +32 -0
  2. package/.claude/commands/checkpoint.md +40 -0
  3. package/.claude/commands/create-spec.md +613 -0
  4. package/.claude/commands/expand-project.md +234 -0
  5. package/.claude/commands/gsd-to-autoforge-spec.md +10 -0
  6. package/.claude/commands/review-pr.md +75 -0
  7. package/.claude/templates/app_spec.template.txt +331 -0
  8. package/.claude/templates/coding_prompt.template.md +265 -0
  9. package/.claude/templates/initializer_prompt.template.md +354 -0
  10. package/.claude/templates/testing_prompt.template.md +146 -0
  11. package/.env.example +64 -0
  12. package/LICENSE.md +676 -0
  13. package/README.md +423 -0
  14. package/agent.py +444 -0
  15. package/api/__init__.py +10 -0
  16. package/api/database.py +536 -0
  17. package/api/dependency_resolver.py +449 -0
  18. package/api/migration.py +156 -0
  19. package/auth.py +83 -0
  20. package/autoforge_paths.py +315 -0
  21. package/autonomous_agent_demo.py +293 -0
  22. package/bin/autoforge.js +3 -0
  23. package/client.py +607 -0
  24. package/env_constants.py +27 -0
  25. package/examples/OPTIMIZE_CONFIG.md +230 -0
  26. package/examples/README.md +531 -0
  27. package/examples/org_config.yaml +172 -0
  28. package/examples/project_allowed_commands.yaml +139 -0
  29. package/lib/cli.js +791 -0
  30. package/mcp_server/__init__.py +1 -0
  31. package/mcp_server/feature_mcp.py +988 -0
  32. package/package.json +53 -0
  33. package/parallel_orchestrator.py +1800 -0
  34. package/progress.py +247 -0
  35. package/prompts.py +427 -0
  36. package/pyproject.toml +17 -0
  37. package/rate_limit_utils.py +132 -0
  38. package/registry.py +614 -0
  39. package/requirements-prod.txt +14 -0
  40. package/security.py +959 -0
  41. package/server/__init__.py +17 -0
  42. package/server/main.py +261 -0
  43. package/server/routers/__init__.py +32 -0
  44. package/server/routers/agent.py +177 -0
  45. package/server/routers/assistant_chat.py +327 -0
  46. package/server/routers/devserver.py +309 -0
  47. package/server/routers/expand_project.py +239 -0
  48. package/server/routers/features.py +746 -0
  49. package/server/routers/filesystem.py +514 -0
  50. package/server/routers/projects.py +524 -0
  51. package/server/routers/schedules.py +356 -0
  52. package/server/routers/settings.py +127 -0
  53. package/server/routers/spec_creation.py +357 -0
  54. package/server/routers/terminal.py +453 -0
  55. package/server/schemas.py +593 -0
  56. package/server/services/__init__.py +36 -0
  57. package/server/services/assistant_chat_session.py +496 -0
  58. package/server/services/assistant_database.py +304 -0
  59. package/server/services/chat_constants.py +57 -0
  60. package/server/services/dev_server_manager.py +557 -0
  61. package/server/services/expand_chat_session.py +399 -0
  62. package/server/services/process_manager.py +657 -0
  63. package/server/services/project_config.py +475 -0
  64. package/server/services/scheduler_service.py +683 -0
  65. package/server/services/spec_chat_session.py +502 -0
  66. package/server/services/terminal_manager.py +756 -0
  67. package/server/utils/__init__.py +1 -0
  68. package/server/utils/process_utils.py +134 -0
  69. package/server/utils/project_helpers.py +32 -0
  70. package/server/utils/validation.py +54 -0
  71. package/server/websocket.py +903 -0
  72. package/start.py +456 -0
  73. package/ui/dist/assets/index-8W_wmZzz.js +168 -0
  74. package/ui/dist/assets/index-B47Ubhox.css +1 -0
  75. package/ui/dist/assets/vendor-flow-CVNK-_lx.js +7 -0
  76. package/ui/dist/assets/vendor-query-BUABzP5o.js +1 -0
  77. package/ui/dist/assets/vendor-radix-DTNNCg2d.js +45 -0
  78. package/ui/dist/assets/vendor-react-qkC6yhPU.js +1 -0
  79. package/ui/dist/assets/vendor-utils-COeKbHgx.js +2 -0
  80. package/ui/dist/assets/vendor-xterm-DP_gxef0.js +16 -0
  81. package/ui/dist/index.html +23 -0
  82. package/ui/dist/ollama.png +0 -0
  83. package/ui/dist/vite.svg +6 -0
  84. package/ui/package.json +57 -0
@@ -0,0 +1,536 @@
1
+ """
2
+ Database Models and Connection
3
+ ==============================
4
+
5
+ SQLite database schema for feature storage using SQLAlchemy.
6
+ """
7
+
8
+ import sys
9
+ from datetime import datetime, timezone
10
+ from pathlib import Path
11
+ from typing import Generator, Optional
12
+
13
+
14
+ def _utc_now() -> datetime:
15
+ """Return current UTC time. Replacement for deprecated _utc_now()."""
16
+ return datetime.now(timezone.utc)
17
+
18
+ from sqlalchemy import (
19
+ Boolean,
20
+ CheckConstraint,
21
+ Column,
22
+ DateTime,
23
+ ForeignKey,
24
+ Index,
25
+ Integer,
26
+ String,
27
+ Text,
28
+ create_engine,
29
+ event,
30
+ text,
31
+ )
32
+ from sqlalchemy.orm import DeclarativeBase, Session, relationship, sessionmaker
33
+ from sqlalchemy.types import JSON
34
+
35
+
36
class Base(DeclarativeBase):
    """SQLAlchemy 2.0 style declarative base.

    All ORM models in this module (Feature, Schedule, ScheduleOverride)
    inherit from this so `Base.metadata.create_all()` creates every table.
    """
    pass
39
+
40
+
41
class Feature(Base):
    """Feature model representing a test case/feature to implement."""

    __tablename__ = "features"

    # Composite index for common status query pattern (passes, in_progress)
    # Used by feature_get_stats, get_ready_features, and other status queries
    __table_args__ = (
        Index('ix_feature_status', 'passes', 'in_progress'),
    )

    id = Column(Integer, primary_key=True, index=True)
    # Lower number = higher priority; 999 acts as an "unprioritized" default.
    priority = Column(Integer, nullable=False, default=999, index=True)
    category = Column(String(100), nullable=False)
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=False)
    steps = Column(JSON, nullable=False)  # Stored as JSON array
    passes = Column(Boolean, nullable=False, default=False, index=True)
    in_progress = Column(Boolean, nullable=False, default=False, index=True)
    # Dependencies: list of feature IDs that must be completed before this feature
    # NULL/empty = no dependencies (backwards compatible)
    dependencies = Column(JSON, nullable=True, default=None)

    def to_dict(self) -> dict:
        """Convert feature to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "priority": self.priority,
            "category": self.category,
            "name": self.name,
            "description": self.description,
            "steps": self.steps,
            # Handle legacy NULL values gracefully - treat as False
            "passes": self.passes if self.passes is not None else False,
            "in_progress": self.in_progress if self.in_progress is not None else False,
            # Dependencies: NULL/empty treated as empty list for backwards compat
            "dependencies": self.dependencies if self.dependencies else [],
        }

    def get_dependencies_safe(self) -> list[int]:
        """Safely extract dependencies, handling NULL and malformed data.

        Non-integer entries (e.g. from hand-edited JSON) are silently dropped.
        """
        if self.dependencies is None:
            return []
        if isinstance(self.dependencies, list):
            # NOTE(review): isinstance(d, int) also accepts bools (bool is a
            # subclass of int) — confirm that is acceptable for dependency IDs.
            return [d for d in self.dependencies if isinstance(d, int)]
        return []
87
+
88
+
89
class Schedule(Base):
    """Time-based schedule for automated agent start/stop."""

    __tablename__ = "schedules"

    # Database-level CHECK constraints for data integrity
    __table_args__ = (
        CheckConstraint('duration_minutes >= 1 AND duration_minutes <= 1440', name='ck_schedule_duration'),
        CheckConstraint('days_of_week >= 0 AND days_of_week <= 127', name='ck_schedule_days'),
        CheckConstraint('max_concurrency >= 1 AND max_concurrency <= 5', name='ck_schedule_concurrency'),
        CheckConstraint('crash_count >= 0', name='ck_schedule_crash_count'),
    )

    id = Column(Integer, primary_key=True, index=True)
    project_name = Column(String(50), nullable=False, index=True)

    # Timing (stored in UTC)
    start_time = Column(String(5), nullable=False)  # "HH:MM" format
    duration_minutes = Column(Integer, nullable=False)  # 1-1440

    # Day filtering (bitfield: Mon=1, Tue=2, Wed=4, Thu=8, Fri=16, Sat=32, Sun=64)
    days_of_week = Column(Integer, nullable=False, default=127)  # 127 = all days

    # State
    enabled = Column(Boolean, nullable=False, default=True, index=True)

    # Agent configuration for scheduled runs
    yolo_mode = Column(Boolean, nullable=False, default=False)
    model = Column(String(50), nullable=True)  # None = use global default
    max_concurrency = Column(Integer, nullable=False, default=3)  # 1-5 concurrent agents

    # Crash recovery tracking
    crash_count = Column(Integer, nullable=False, default=0)  # Resets at window start

    # Metadata
    # NOTE(review): _utc_now returns an aware datetime but the DateTime column
    # has no timezone=True — SQLite stores it without offset; confirm readers
    # treat it as UTC.
    created_at = Column(DateTime, nullable=False, default=_utc_now)

    # Relationships
    overrides = relationship(
        "ScheduleOverride", back_populates="schedule", cascade="all, delete-orphan"
    )

    def to_dict(self) -> dict:
        """Convert schedule to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "project_name": self.project_name,
            "start_time": self.start_time,
            "duration_minutes": self.duration_minutes,
            "days_of_week": self.days_of_week,
            "enabled": self.enabled,
            "yolo_mode": self.yolo_mode,
            "model": self.model,
            "max_concurrency": self.max_concurrency,
            "crash_count": self.crash_count,
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }

    def is_active_on_day(self, weekday: int) -> bool:
        """Check if schedule is active on given weekday (0=Monday, 6=Sunday)."""
        day_bit = 1 << weekday
        return bool(self.days_of_week & day_bit)
151
+
152
+
153
class ScheduleOverride(Base):
    """Persisted manual override for a schedule window."""

    __tablename__ = "schedule_overrides"

    id = Column(Integer, primary_key=True, index=True)
    # ondelete="CASCADE" removes overrides when their schedule row is deleted.
    schedule_id = Column(
        Integer, ForeignKey("schedules.id", ondelete="CASCADE"), nullable=False
    )

    # Override details
    override_type = Column(String(10), nullable=False)  # "start" or "stop"
    expires_at = Column(DateTime, nullable=False)  # When this window ends (UTC)

    # Metadata
    created_at = Column(DateTime, nullable=False, default=_utc_now)

    # Relationships
    schedule = relationship("Schedule", back_populates="overrides")

    def to_dict(self) -> dict:
        """Convert override to dictionary for JSON serialization."""
        return {
            "id": self.id,
            "schedule_id": self.schedule_id,
            "override_type": self.override_type,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }
182
+
183
+
184
def get_database_path(project_dir: Path) -> Path:
    """Resolve the on-disk location of a project's SQLite database file.

    Delegates to autoforge_paths so the .autoforge/ layout is defined in
    exactly one place.
    """
    from autoforge_paths import get_features_db_path

    return get_features_db_path(project_dir)
188
+
189
+
190
def get_database_url(project_dir: Path) -> str:
    """Build the SQLAlchemy connection URL for a project's SQLite database.

    The path is rendered in POSIX form (forward slashes) so the resulting
    URL works on Windows as well.
    """
    posix_path = get_database_path(project_dir).as_posix()
    return "sqlite:///" + posix_path
197
+
198
+
199
def _migrate_add_in_progress_column(engine) -> None:
    """Ensure the `in_progress` column exists on the features table.

    Older databases predate this column; add it with a 0 (False) default.
    """
    with engine.connect() as conn:
        info = conn.execute(text("PRAGMA table_info(features)"))
        existing = {row[1] for row in info.fetchall()}
        if "in_progress" in existing:
            return
        conn.execute(text("ALTER TABLE features ADD COLUMN in_progress BOOLEAN DEFAULT 0"))
        conn.commit()
210
+
211
+
212
def _migrate_fix_null_boolean_fields(engine) -> None:
    """Backfill NULLs in the boolean status columns with 0 (False).

    Legacy rows may carry NULL for `passes` / `in_progress`; normalize them
    so status queries behave consistently.
    """
    backfills = (
        "UPDATE features SET passes = 0 WHERE passes IS NULL",
        "UPDATE features SET in_progress = 0 WHERE in_progress IS NULL",
    )
    with engine.connect() as conn:
        for statement in backfills:
            conn.execute(text(statement))
        conn.commit()
220
+
221
+
222
def _migrate_add_dependencies_column(engine) -> None:
    """Ensure the `dependencies` column exists on the features table.

    Uses a NULL default for backwards compatibility: existing features
    without dependencies keep NULL, which the model treats as an empty list.
    SQLite stores the JSON payload as TEXT.
    """
    with engine.connect() as conn:
        info = conn.execute(text("PRAGMA table_info(features)"))
        existing = {row[1] for row in info.fetchall()}
        if "dependencies" in existing:
            return
        conn.execute(text("ALTER TABLE features ADD COLUMN dependencies TEXT DEFAULT NULL"))
        conn.commit()
237
+
238
+
239
+ def _migrate_add_testing_columns(engine) -> None:
240
+ """Legacy migration - no longer adds testing columns.
241
+
242
+ The testing_in_progress and last_tested_at columns were removed from the
243
+ Feature model as part of simplifying the testing agent architecture.
244
+ Multiple testing agents can now test the same feature concurrently
245
+ without coordination.
246
+
247
+ This function is kept for backwards compatibility but does nothing.
248
+ Existing databases with these columns will continue to work - the columns
249
+ are simply ignored.
250
+ """
251
+ pass
252
+
253
+
254
+ def _is_network_path(path: Path) -> bool:
255
+ """Detect if path is on a network filesystem.
256
+
257
+ WAL mode doesn't work reliably on network filesystems (NFS, SMB, CIFS)
258
+ and can cause database corruption. This function detects common network
259
+ path patterns so we can fall back to DELETE mode.
260
+
261
+ Args:
262
+ path: The path to check
263
+
264
+ Returns:
265
+ True if the path appears to be on a network filesystem
266
+ """
267
+ path_str = str(path.resolve())
268
+
269
+ if sys.platform == "win32":
270
+ # Windows UNC paths: \\server\share or \\?\UNC\server\share
271
+ if path_str.startswith("\\\\"):
272
+ return True
273
+ # Mapped network drives - check if the drive is a network drive
274
+ try:
275
+ import ctypes
276
+ drive = path_str[:2] # e.g., "Z:"
277
+ if len(drive) == 2 and drive[1] == ":":
278
+ # DRIVE_REMOTE = 4
279
+ drive_type = ctypes.windll.kernel32.GetDriveTypeW(drive + "\\")
280
+ if drive_type == 4: # DRIVE_REMOTE
281
+ return True
282
+ except (AttributeError, OSError):
283
+ pass
284
+ else:
285
+ # Unix: Check mount type via /proc/mounts or mount command
286
+ try:
287
+ with open("/proc/mounts", "r") as f:
288
+ mounts = f.read()
289
+ # Check each mount point to find which one contains our path
290
+ for line in mounts.splitlines():
291
+ parts = line.split()
292
+ if len(parts) >= 3:
293
+ mount_point = parts[1]
294
+ fs_type = parts[2]
295
+ # Check if path is under this mount point and if it's a network FS
296
+ if path_str.startswith(mount_point):
297
+ if fs_type in ("nfs", "nfs4", "cifs", "smbfs", "fuse.sshfs"):
298
+ return True
299
+ except (FileNotFoundError, PermissionError):
300
+ pass
301
+
302
+ return False
303
+
304
+
305
def _migrate_add_schedules_tables(engine) -> None:
    """Create or upgrade the schedules and schedule_overrides tables."""
    from sqlalchemy import inspect

    inspector = inspect(engine)
    present = inspector.get_table_names()

    # Create any missing tables from the ORM metadata.
    if "schedules" not in present:
        Schedule.__table__.create(bind=engine)  # type: ignore[attr-defined]
    if "schedule_overrides" not in present:
        ScheduleOverride.__table__.create(bind=engine)  # type: ignore[attr-defined]

    # For a pre-existing schedules table, add columns introduced by upgrades.
    if "schedules" in present:
        known = {col["name"] for col in inspector.get_columns("schedules")}
        upgrades = (
            ("crash_count", "ALTER TABLE schedules ADD COLUMN crash_count INTEGER DEFAULT 0"),
            ("max_concurrency", "ALTER TABLE schedules ADD COLUMN max_concurrency INTEGER DEFAULT 3"),
        )
        for column_name, ddl in upgrades:
            if column_name not in known:
                with engine.connect() as conn:
                    conn.execute(text(ddl))
                    conn.commit()
337
+
338
+
339
def _configure_sqlite_immediate_transactions(engine) -> None:
    """Wire up event hooks so every transaction starts with BEGIN IMMEDIATE.

    Per SQLAlchemy docs: https://docs.sqlalchemy.org/en/20/dialects/sqlite.html

    Replaces pysqlite's fragile implicit transaction handling with an explicit
    BEGIN IMMEDIATE at transaction start. Benefits:
    - Acquires the write lock up front, preventing stale reads
    - Works correctly regardless of prior ORM operations
    - Future-proof: won't break when pysqlite legacy mode is removed in Python 3.16
    """
    @event.listens_for(engine, "connect")
    def _on_connect(dbapi_connection, connection_record):
        # Turn off pysqlite's own implicit transaction management.
        dbapi_connection.isolation_level = None

        # Apply busy_timeout on the raw connection before any transaction runs.
        cur = dbapi_connection.cursor()
        try:
            cur.execute("PRAGMA busy_timeout=30000")
        finally:
            cur.close()

    @event.listens_for(engine, "begin")
    def _on_begin(conn):
        # Take the write lock immediately rather than lazily, so
        # read-modify-write sequences never observe stale data.
        conn.exec_driver_sql("BEGIN IMMEDIATE")
366
+
367
+
368
def create_database(project_dir: Path) -> tuple:
    """
    Create database and return engine + session maker.

    Uses a cache to avoid creating new engines for each request, which improves
    performance by reusing database connections.

    Args:
        project_dir: Directory containing the project

    Returns:
        Tuple of (engine, SessionLocal)
    """
    cache_key = project_dir.as_posix()

    # NOTE(review): this check-then-set on _engine_cache is not guarded by a
    # lock; two concurrent first calls could each build an engine — presumably
    # acceptable (last write wins), confirm with the server's startup model.
    if cache_key in _engine_cache:
        return _engine_cache[cache_key]

    db_url = get_database_url(project_dir)

    # Ensure parent directory exists (for .autoforge/ layout)
    db_path = get_database_path(project_dir)
    db_path.parent.mkdir(parents=True, exist_ok=True)

    # Choose journal mode based on filesystem type
    # WAL mode doesn't work reliably on network filesystems and can cause corruption
    is_network = _is_network_path(project_dir)
    journal_mode = "DELETE" if is_network else "WAL"

    engine = create_engine(db_url, connect_args={
        "check_same_thread": False,
        "timeout": 30  # Wait up to 30s for locks
    })

    # Set journal mode BEFORE configuring event hooks
    # PRAGMA journal_mode must run outside of a transaction, and our event hooks
    # start a transaction with BEGIN IMMEDIATE on every operation
    with engine.connect() as conn:
        # Get raw DBAPI connection to execute PRAGMA outside transaction
        raw_conn = conn.connection.dbapi_connection
        if raw_conn is None:
            raise RuntimeError("Failed to get raw DBAPI connection")
        cursor = raw_conn.cursor()
        try:
            # journal_mode=WAL is persistent in the database file, so setting
            # it once here applies to all later connections from the pool.
            cursor.execute(f"PRAGMA journal_mode={journal_mode}")
            cursor.execute("PRAGMA busy_timeout=30000")
        finally:
            cursor.close()

    # Configure IMMEDIATE transactions via event hooks AFTER setting PRAGMAs
    # This must happen before create_all() and migrations run
    _configure_sqlite_immediate_transactions(engine)

    Base.metadata.create_all(bind=engine)

    # Migrate existing databases
    _migrate_add_in_progress_column(engine)
    _migrate_fix_null_boolean_fields(engine)
    _migrate_add_dependencies_column(engine)
    _migrate_add_testing_columns(engine)

    # Migrate to add schedules tables
    _migrate_add_schedules_tables(engine)

    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

    # Cache the engine and session maker
    _engine_cache[cache_key] = (engine, SessionLocal)

    return engine, SessionLocal
438
+
439
+
440
def dispose_engine(project_dir: Path) -> bool:
    """Dispose of and remove the cached engine for a project.

    Closes every pooled database connection, which releases file locks on
    Windows. Call this before deleting the database file.

    Returns:
        True if an engine was disposed, False if no engine was cached.
    """
    key = project_dir.as_posix()
    cached = _engine_cache.pop(key, None)
    if cached is None:
        return False

    engine, _session_factory = cached
    engine.dispose()
    return True
457
+
458
+
459
# Global session maker - will be set when server starts
# (populated via set_session_maker(); get_db() raises until then).
_session_maker: Optional[sessionmaker] = None

# Engine cache to avoid creating new engines for each request
# Key: project directory path (as posix string), Value: (engine, SessionLocal)
# Entries are removed (and connections closed) by dispose_engine().
_engine_cache: dict[str, tuple] = {}
465
+
466
+
467
def set_session_maker(session_maker: sessionmaker) -> None:
    """Install the module-wide session maker consumed by get_db()."""
    global _session_maker
    _session_maker = session_maker
471
+
472
+
473
def get_db() -> Generator[Session, None, None]:
    """
    Dependency for FastAPI to get database session.

    Yields a session from the installed session maker; rolls back on any
    exception and always closes the session afterwards.
    """
    if _session_maker is None:
        raise RuntimeError("Database not initialized. Call set_session_maker first.")

    session = _session_maker()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
490
+
491
+
492
+ # =============================================================================
493
+ # Atomic Transaction Helpers for Parallel Mode
494
+ # =============================================================================
495
+ # These helpers prevent database corruption when multiple processes access the
496
+ # same SQLite database concurrently. They use IMMEDIATE transactions which
497
+ # acquire write locks at the start (preventing stale reads) and atomic
498
+ # UPDATE ... WHERE clauses (preventing check-then-modify races).
499
+
500
+
501
+ from contextlib import contextmanager
502
+
503
+
504
@contextmanager
def atomic_transaction(session_maker):
    """Run a block of ORM work as one atomic SQLite transaction.

    Because the engine's event hooks issue BEGIN IMMEDIATE, the write lock
    is taken as soon as the transaction starts, so read-modify-write
    sequences inside the block cannot observe stale data from concurrent
    writers. This is what makes parallel mode safe.

    Args:
        session_maker: SQLAlchemy sessionmaker

    Yields:
        SQLAlchemy session; committed on normal exit, rolled back on error.

    Example:
        with atomic_transaction(session_maker) as session:
            # All reads in this block are protected by the write lock
            feature = session.query(Feature).filter(...).first()
            feature.priority = new_priority
            # Commit happens automatically on exit
    """
    db_session = session_maker()
    try:
        yield db_session
        db_session.commit()
    except Exception:
        # Best-effort rollback; a failing rollback must never mask the
        # exception that triggered it.
        try:
            db_session.rollback()
        except Exception:
            pass
        raise
    finally:
        db_session.close()