edda-framework 0.11.0__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -88,6 +88,7 @@ class WorkflowInstance(Base):
  workflow_name: Mapped[str] = mapped_column(String(255))
  source_hash: Mapped[str] = mapped_column(String(64))
  owner_service: Mapped[str] = mapped_column(String(255))
+ framework: Mapped[str] = mapped_column(String(50), server_default=text("'python'"))
  status: Mapped[str] = mapped_column(String(50), server_default=text("'running'"))
  current_activity_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
  continued_from: Mapped[str | None] = mapped_column(
@@ -121,6 +122,7 @@ class WorkflowInstance(Base):
  Index("idx_instances_status", "status"),
  Index("idx_instances_workflow", "workflow_name"),
  Index("idx_instances_owner", "owner_service"),
+ Index("idx_instances_framework", "framework"),
  Index("idx_instances_locked", "locked_by", "locked_at"),
  Index("idx_instances_lock_expires", "lock_expires_at"),
  Index("idx_instances_updated", "updated_at"),
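The new `framework` column (server default `'python'`) and its index let multiple runtimes share one database while each claims only its own instances. A minimal sketch of the resulting query shape, assuming `WorkflowInstance` is importable from `edda/storage/models.py` (listed in the RECORD below); the helper itself is illustrative and not part of the package:

```python
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from edda.storage.models import WorkflowInstance  # module path taken from the RECORD entries below


async def count_running_python_instances(session: AsyncSession) -> int:
    """Illustrative helper: count running instances owned by the Python runtime."""
    result = await session.execute(
        select(func.count())
        .select_from(WorkflowInstance)
        .where(
            WorkflowInstance.framework == "python",
            WorkflowInstance.status == "running",
        )
    )
    return result.scalar_one()
```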
@@ -516,6 +518,7 @@ class SQLAlchemyStorage:
  self,
  engine: AsyncEngine,
  notify_listener: Any | None = None,
+ migrations_dir: str | None = None,
  ):
  """
  Initialize SQLAlchemy storage.
@@ -525,9 +528,12 @@ class SQLAlchemyStorage:
  notify_listener: Optional notify listener for PostgreSQL LISTEN/NOTIFY.
  If provided and PostgreSQL is used, NOTIFY messages
  will be sent after key operations.
+ migrations_dir: Optional path to migrations directory. If None,
+ auto-detects from package or schema/ submodule.
  """
  self.engine = engine
  self._notify_listener = notify_listener
+ self._migrations_dir = migrations_dir

  @property
  def _is_postgresql(self) -> bool:
@@ -551,20 +557,21 @@ class SQLAlchemyStorage:
  self._notify_listener = listener

  async def initialize(self) -> None:
- """Initialize database connection and create tables.
+ """Initialize database connection and apply migrations.

- This method creates all tables if they don't exist, and then performs
- automatic schema migration to add any missing columns and update CHECK
- constraints. This ensures compatibility when upgrading Edda versions.
+ This method automatically applies dbmate migration files to create
+ tables and update schema. It tracks applied migrations in the
+ schema_migrations table (compatible with dbmate CLI).
  """
- # Create all tables and indexes
- async with self.engine.begin() as conn:
- await conn.run_sync(Base.metadata.create_all)
+ from pathlib import Path
+
+ from .migrations import apply_dbmate_migrations

- # Auto-migrate schema (add missing columns)
- await self._auto_migrate_schema()
+ # Apply dbmate migrations
+ migrations_dir = Path(self._migrations_dir) if self._migrations_dir else None
+ await apply_dbmate_migrations(self.engine, migrations_dir)

- # Auto-migrate CHECK constraints
+ # Auto-migrate CHECK constraints (for existing tables)
  await self._auto_migrate_check_constraints()

  # Initialize schema version
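For orientation, a rough sketch of how this new initialization path is driven from application code, based on the constructor and `initialize()` changes above. The engine URL is illustrative, the import path for `SQLAlchemyStorage` is assumed from the RECORD listing, and `migrations_dir=None` relies on the auto-detection described in the docstring:

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

from edda.storage.sqlalchemy_storage import SQLAlchemyStorage  # path assumed from RECORD


async def main() -> None:
    # Any SQLAlchemy async URL works; SQLite is shown purely for illustration.
    engine = create_async_engine("sqlite+aiosqlite:///./edda.db")
    storage = SQLAlchemyStorage(engine, migrations_dir=None)
    # Applies the bundled dbmate migration files, then the CHECK-constraint
    # auto-migration for pre-existing tables (per the docstring above).
    await storage.initialize()
    await engine.dispose()


asyncio.run(main())
```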
@@ -1227,6 +1234,7 @@ class SQLAlchemyStorage:
  workflow_name=workflow_name,
  source_hash=source_hash,
  owner_service=owner_service,
+ framework="python",
  input_data=json.dumps(input_data),
  lock_timeout_seconds=lock_timeout_seconds,
  continued_from=continued_from,
@@ -1332,6 +1340,7 @@ class SQLAlchemyStorage:
  WorkflowInstance.source_hash == WorkflowDefinition.source_hash,
  ),
  )
+ .where(WorkflowInstance.framework == "python")
  .order_by(
  WorkflowInstance.started_at.desc(),
  WorkflowInstance.instance_id.desc(),
@@ -1700,6 +1709,7 @@ class SQLAlchemyStorage:
  WorkflowInstance.lock_expires_at.isnot(None),
  self._make_datetime_comparable(WorkflowInstance.lock_expires_at)
  < self._get_current_time_expr(),
+ WorkflowInstance.framework == "python",
  )
  )
  )
@@ -1752,10 +1762,9 @@ class SQLAlchemyStorage:
  """
  Try to acquire a system-level lock for coordinating background tasks.

- Uses INSERT ON CONFLICT pattern to handle race conditions:
- 1. Try to INSERT new lock record
- 2. If exists, check if expired or unlocked
- 3. If available, acquire lock; otherwise return False
+ Uses atomic UPDATE pattern to avoid race conditions:
+ 1. Ensure row exists (INSERT OR IGNORE / ON CONFLICT DO NOTHING)
+ 2. Atomic UPDATE with WHERE condition (rowcount determines success)

  Note: ALWAYS uses separate session (not external session).
  """
@@ -1764,51 +1773,78 @@ class SQLAlchemyStorage:
  current_time = datetime.now(UTC)
  lock_expires_at = current_time + timedelta(seconds=timeout_seconds)

- # Try to get existing lock
- result = await session.execute(
- select(SystemLock).where(SystemLock.lock_name == lock_name)
- )
- lock = result.scalar_one_or_none()
+ # Get dialect name
+ dialect_name = self.engine.dialect.name
+
+ # 1. Ensure row exists (idempotent INSERT)
+ if dialect_name == "sqlite":
+ from sqlalchemy.dialects.sqlite import insert as sqlite_insert
+
+ stmt: Any = (
+ sqlite_insert(SystemLock)
+ .values(
+ lock_name=lock_name,
+ locked_by=None,
+ locked_at=None,
+ lock_expires_at=None,
+ )
+ .on_conflict_do_nothing(index_elements=["lock_name"])
+ )
+ elif dialect_name == "postgresql":
+ from sqlalchemy.dialects.postgresql import insert as pg_insert

- if lock is None:
- # No lock exists - create new one
- lock = SystemLock(
- lock_name=lock_name,
+ stmt = (
+ pg_insert(SystemLock)
+ .values(
+ lock_name=lock_name,
+ locked_by=None,
+ locked_at=None,
+ lock_expires_at=None,
+ )
+ .on_conflict_do_nothing(index_elements=["lock_name"])
+ )
+ else: # mysql
+ from sqlalchemy.dialects.mysql import insert as mysql_insert
+
+ stmt = (
+ mysql_insert(SystemLock)
+ .values(
+ lock_name=lock_name,
+ locked_by=None,
+ locked_at=None,
+ lock_expires_at=None,
+ )
+ .on_duplicate_key_update(lock_name=lock_name)
+ ) # No-op update
+
+ await session.execute(stmt)
+ await session.commit()
+
+ # 2. Atomic UPDATE to acquire lock (rowcount == 1 means success)
+ # Use SQL-side datetime comparison for cross-DB compatibility
+ current_time_expr = self._get_current_time_expr()
+ result = await session.execute(
+ update(SystemLock)
+ .where(
+ and_(
+ SystemLock.lock_name == lock_name,
+ or_(
+ SystemLock.locked_by == None, # noqa: E711
+ SystemLock.locked_by == worker_id, # Allow renewal by same worker
+ self._make_datetime_comparable(SystemLock.lock_expires_at)
+ <= current_time_expr,
+ ),
+ )
+ )
+ .values(
  locked_by=worker_id,
  locked_at=current_time,
  lock_expires_at=lock_expires_at,
  )
- session.add(lock)
- await session.commit()
- return True
-
- # Lock exists - check if available
- if lock.locked_by is None:
- # Unlocked - acquire
- lock.locked_by = worker_id
- lock.locked_at = current_time
- lock.lock_expires_at = lock_expires_at
- await session.commit()
- return True
-
- # Check if expired (use SQL-side comparison for cross-DB compatibility)
- if lock.lock_expires_at is not None:
- # Handle timezone-naive datetime from SQLite
- lock_expires = (
- lock.lock_expires_at.replace(tzinfo=UTC)
- if lock.lock_expires_at.tzinfo is None
- else lock.lock_expires_at
- )
- if lock_expires <= current_time:
- # Expired - acquire
- lock.locked_by = worker_id
- lock.locked_at = current_time
- lock.lock_expires_at = lock_expires_at
- await session.commit()
- return True
+ )
+ await session.commit()

- # Already locked by another worker
- return False
+ return bool(result.rowcount == 1) # type: ignore[attr-defined]

  async def release_system_lock(self, lock_name: str, worker_id: str) -> None:
  """
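A hedged sketch of how a background task might consume the two-step lock protocol implemented above: a `True` return (rowcount == 1) means the row was claimed or renewed, and the holder releases it when done. Only `release_system_lock` appears by name in this diff; the acquiring method's name and the `storage` object are assumptions for illustration only:

```python
import asyncio


async def run_guarded_maintenance(storage, worker_id: str) -> None:
    """Illustrative loop: do periodic work only while holding the system lock."""
    while True:
        # Assumed name for the method whose body is shown above; it returns True
        # only when the atomic UPDATE claimed (or renewed) the lock row.
        acquired = await storage.try_acquire_system_lock(
            "maintenance", worker_id, timeout_seconds=30
        )
        if acquired:
            try:
                ...  # one unit of work while the lock is held
            finally:
                await storage.release_system_lock("maintenance", worker_id)
        await asyncio.sleep(5)
```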
@@ -2033,7 +2069,7 @@ class SQLAlchemyStorage:
  result = await session.execute(
  select(WorkflowCompensation)
  .where(WorkflowCompensation.instance_id == instance_id)
- .order_by(WorkflowCompensation.created_at.desc())
+ .order_by(WorkflowCompensation.created_at.desc(), WorkflowCompensation.id.desc())
  )
  rows = result.scalars().all()

@@ -2178,6 +2214,7 @@ class SQLAlchemyStorage:
  self._make_datetime_comparable(WorkflowTimerSubscription.expires_at)
  <= self._get_current_time_expr(),
  WorkflowInstance.status == "waiting_for_timer",
+ WorkflowInstance.framework == "python",
  )
  )
  )
@@ -2252,7 +2289,7 @@ class SQLAlchemyStorage:

  # Send NOTIFY for new outbox event
  await self._send_notify(
- "edda_outbox_pending",
+ "workflow_outbox_pending",
  {"evt_id": event_id, "evt_type": event_type},
  )

@@ -2764,6 +2801,7 @@ class SQLAlchemyStorage:
  ChannelSubscription.activity_id.isnot(None), # Only waiting subscriptions
  self._make_datetime_comparable(ChannelSubscription.timeout_at)
  <= self._get_current_time_expr(),
+ WorkflowInstance.framework == "python",
  )
  )
  )
@@ -2902,6 +2940,7 @@ class SQLAlchemyStorage:
  and_(
  WorkflowInstance.status == "running",
  WorkflowInstance.locked_by.is_(None),
+ WorkflowInstance.framework == "python",
  )
  )
  if limit is not None:
@@ -3002,12 +3041,9 @@ class SQLAlchemyStorage:
  session.add(msg)
  await self._commit_if_not_in_transaction(session)

- # Send NOTIFY for message published (channel-specific)
- import hashlib
-
- channel_hash = hashlib.sha256(channel.encode()).hexdigest()[:16]
+ # Send NOTIFY for message published (unified channel name)
  await self._send_notify(
- f"edda_msg_{channel_hash}",
+ "workflow_channel_message",
  {"ch": channel, "msg_id": message_id},
  )

@@ -3571,7 +3607,7 @@ class SQLAlchemyStorage:

  # Send NOTIFY for workflow resumable
  await self._send_notify(
- "edda_workflow_resumable",
+ "workflow_resumable",
  {"wf_id": instance_id, "wf_name": workflow_name},
  )

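The renamed NOTIFY channels above (`workflow_outbox_pending`, `workflow_channel_message`, `workflow_resumable`) can be watched directly with asyncpg when debugging a PostgreSQL deployment. This standalone listener is illustrative and not part of the package; only the channel names and payload keys come from the diff:

```python
import asyncio

import asyncpg


async def tail_edda_notifications(dsn: str) -> None:
    """Print every NOTIFY Edda sends on its renamed channels (debugging aid)."""
    conn = await asyncpg.connect(dsn)

    def on_notify(connection, pid, channel, payload):
        # Payloads are compact JSON, e.g. {"wf_id": ..., "wf_name": ...}.
        print(f"{channel}: {payload}")

    for channel in (
        "workflow_outbox_pending",
        "workflow_channel_message",
        "workflow_resumable",
    ):
        await conn.add_listener(channel, on_notify)

    await asyncio.Event().wait()  # keep the connection open until cancelled
```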
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: edda-framework
- Version: 0.11.0
+ Version: 0.12.0
  Summary: Lightweight Durable Execution Framework
  Project-URL: Homepage, https://github.com/i2y/edda
  Project-URL: Documentation, https://github.com/i2y/edda#readme
@@ -286,6 +286,46 @@ pip install "git+https://github.com/i2y/edda.git[postgresql,viewer]"

  > **Tip**: For PostgreSQL, install the `postgres-notify` extra for near-instant event delivery using LISTEN/NOTIFY instead of polling.

+ ### Database Schema Migration
+
+ **Automatic Migration (Default)**
+
+ Edda automatically applies database migrations at startup. No manual commands needed:
+
+ ```python
+ from edda import EddaApp
+
+ # Migrations are applied automatically
+ app = EddaApp(db_url="postgresql://user:pass@localhost/dbname")
+ ```
+
+ This is safe in multi-worker environments - Edda handles concurrent startup gracefully.
+
+ **Manual Migration with dbmate (Optional)**
+
+ For explicit schema control, you can disable auto-migration and use [dbmate](https://github.com/amacneil/dbmate):
+
+ ```python
+ # Disable auto-migration
+ app = EddaApp(
+ db_url="postgresql://...",
+ auto_migrate=False # Use dbmate-managed schema
+ )
+ ```
+
+ ```bash
+ # Install dbmate
+ brew install dbmate # macOS
+
+ # Add schema submodule
+ git submodule add https://github.com/durax-io/schema.git schema
+
+ # Run migration manually
+ DATABASE_URL="postgresql://user:pass@localhost/dbname" dbmate -d ./schema/db/migrations/postgresql up
+ ```
+
+ > **Note**: Edda's auto-migration uses the same SQL files as dbmate, maintaining full compatibility.
+
  ### Development Installation

  If you want to contribute to Edda or modify the framework itself:
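Because the auto-migration path records applied files in a dbmate-compatible `schema_migrations` table (per the `initialize()` docstring earlier in this diff), the applied versions can be checked without the dbmate CLI. A minimal sketch, assuming dbmate's standard single `version` column; the helper is illustrative only:

```python
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def list_applied_migrations(db_url: str) -> list[str]:
    """Illustrative check of the dbmate-compatible schema_migrations table."""
    engine = create_async_engine(db_url)
    async with engine.connect() as conn:
        result = await conn.execute(
            text("SELECT version FROM schema_migrations ORDER BY version")
        )
        versions = [row[0] for row in result]
    await engine.dispose()
    return versions
```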
@@ -293,7 +333,7 @@ If you want to contribute to Edda or modify the framework itself:
  ```bash
  # Clone repository
  git clone https://github.com/i2y/edda.git
- cd kairo
+ cd edda
  uv sync --all-extras
  ```

@@ -333,7 +373,7 @@ async def user_signup(ctx: WorkflowContext, email: str):
  return {"status": "completed"}
  ```

- **Activity IDs**: Activities are automatically identified with IDs like `"send_email:1"` for deterministic replay. Manual IDs are only needed for concurrent execution (e.g., `asyncio.gather`). See [MIGRATION_GUIDE_ACTIVITY_ID.md](MIGRATION_GUIDE_ACTIVITY_ID.md) for details.
+ **Activity IDs**: Activities are automatically identified with IDs like `"send_email:1"` for deterministic replay. Manual IDs are only needed for concurrent execution (e.g., `asyncio.gather`).

  ### Durable Execution

@@ -1,9 +1,9 @@
  edda/__init__.py,sha256=hGC6WR2R36M8LWC97F-0Rw4Ln0QUUT_1xC-7acOy_Fk,2237
  edda/activity.py,sha256=nRm9eBrr0lFe4ZRQ2whyZ6mo5xd171ITIVhqytUhOpw,21025
- edda/app.py,sha256=hoxDKp6q5qHl_dNLMkLxKhuibUC6D8FtuiWsIZkzwhA,61546
- edda/channels.py,sha256=Budi0FyxalmcAMwj50mX3WzRce5OuLKXGws0Hp_snfw,34745
+ edda/app.py,sha256=ITTc7x5S4ykCP3KPZXKxuNczXkPtbn04ZQaxcem46Hw,68406
+ edda/channels.py,sha256=CosFoB9HVHBKRmhU_t6qoCV3l6egAGt3sqpakfgZLKc,36596
  edda/compensation.py,sha256=iKLlnTxiF1YSatmYQW84EkPB1yMKUEZBtgjuGnghLtY,11824
- edda/context.py,sha256=pPn98-G5HgaOGDRzEhma58TzBulwsiTvmNEMLIu0XwI,21330
+ edda/context.py,sha256=Qqm_nUC5NNnOfHAb7taqKqZVIc0GoRWUrjZ4L9_-q70,22128
  edda/exceptions.py,sha256=-ntBLGpVQgPFG5N1o8m_7weejAYkNrUdxTkOP38vsHk,1766
  edda/hooks.py,sha256=HUZ6FTM__DZjwuomDfTDEroQ3mugEPuJHcGm7CTQNvg,8193
  edda/locking.py,sha256=NAFJmw-JaSVsXn4Y4czJyv_s9bWG8cdrzDBWIEag5X8,13661
@@ -24,17 +24,18 @@ edda/integrations/mirascope/types.py,sha256=vgEAu8EFTLSd92XSAxtZpMoe5gv93fe4Rm0D
  edda/integrations/opentelemetry/__init__.py,sha256=x1_PyyygGDW-rxQTwoIrGzyjKErXHOOKdquFAMlCOAo,906
  edda/integrations/opentelemetry/hooks.py,sha256=rCb6K_gJJMxjQ-UoJnbIOWsafapipzu7w-YPROZKxDA,21330
  edda/outbox/__init__.py,sha256=azXG1rtheJEjOyoWmMsBeR2jp8Bz02R3wDEd5tQnaWA,424
- edda/outbox/relayer.py,sha256=_yOZVpjj862lzMtEK47RMdVdbxXmL8v-FXppZWdy4Ag,10444
+ edda/outbox/relayer.py,sha256=b14BaFnBODg4bz0ft8UZxqORlTQON5SHu4wrfYs3Lx4,11467
  edda/outbox/transactional.py,sha256=LFfUjunqRlGibaINi-efGXFFivWGO7v3mhqrqyGW6Nw,3808
  edda/serialization/__init__.py,sha256=hnOVJN-mJNIsSa_XH9jwhIydOsWvIfCaFaSd37HUplg,216
  edda/serialization/base.py,sha256=xJy2CY9gdJDCF0tmCor8NomL2Lr_w7cveVvxccuc-tA,1998
  edda/serialization/json.py,sha256=Dq96V4n1yozexjCPd_CL6Iuvh1u3jJhef6sTcNxXZeA,2842
  edda/storage/__init__.py,sha256=NjvAzYV3SknACrC16ZQOA-xCKOj1-s3rBIWOS1ZGCaM,407
- edda/storage/models.py,sha256=vUwjiAOvp9uFNQgLK57kEGo7uzXplDZikOfnlOyed2M,12146
+ edda/storage/migrations.py,sha256=KrceouVODct9WWDBhmjAW0IYptDWd2mqJmhrHnee59M,13704
+ edda/storage/models.py,sha256=axXGJ-Orwcd_AsEUwIyFfDyg3NQxMcOQ2mrTzXkNv3g,12284
  edda/storage/notify_base.py,sha256=gUb-ypG1Bo0c-KrleYmC7eKtdwQNUeqGS5k7UILlSsQ,5055
- edda/storage/pg_notify.py,sha256=to5rDIQbiqqkNNVMODye_KvY4EDqRSUQblTeoeDZv8w,11850
+ edda/storage/pg_notify.py,sha256=myzJ9xX86uiro9aaiA1SW1sN3E-zYafn7_lpeAy1jOg,11830
  edda/storage/protocol.py,sha256=vdB5GvBen8lgUA0qEfBXfQTLbVfGKeBTQuEwSUqLZtI,39463
- edda/storage/sqlalchemy_storage.py,sha256=IAc8SYHM2xoJEIVDG05_mkguWQGB3wIAALsc0QI8EcE,144484
+ edda/storage/sqlalchemy_storage.py,sha256=HREK7fHmq3DGx6w4jA03_NrQu9HbyMomyIawMuOQLYQ,146246
  edda/viewer_ui/__init__.py,sha256=N1-T33SXadOXcBsDSgJJ9Iqz4y4verJngWryQu70c5c,517
  edda/viewer_ui/app.py,sha256=K3c5sMeJz_AE9gh5QftxwvfDthLeJi1i2CDkP9gb4Ig,96695
  edda/viewer_ui/components.py,sha256=A0IxLwgj_Lu51O57OfzOwME8jzoJtKegEVvSnWc7uPo,45174
@@ -43,8 +44,11 @@ edda/viewer_ui/theme.py,sha256=mrXoXLRzgSnvE2a58LuMcPJkhlvHEDMWVa8Smqtk4l0,8118
  edda/visualizer/__init__.py,sha256=DOpDstNhR0VcXAs_eMKxaL30p_0u4PKZ4o2ndnYhiRo,343
  edda/visualizer/ast_analyzer.py,sha256=plmx7C9X_X35xLY80jxOL3ljg3afXxBePRZubqUIkxY,13663
  edda/visualizer/mermaid_generator.py,sha256=XWa2egoOTNDfJEjPcwoxwQmblUqXf7YInWFjFRI1QGo,12457
- edda_framework-0.11.0.dist-info/METADATA,sha256=AkgFtGUJNfhOoHXzti_R7fLli1q45Bg5xO6TfpjvsO8,36587
- edda_framework-0.11.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- edda_framework-0.11.0.dist-info/entry_points.txt,sha256=dPH47s6UoJgUZxHoeSMqZsQkLaSE-SGLi-gh88k2WrU,48
- edda_framework-0.11.0.dist-info/licenses/LICENSE,sha256=udxb-V7_cYKTHqW7lNm48rxJ-Zpf0WAY_PyGDK9BPCo,1069
- edda_framework-0.11.0.dist-info/RECORD,,
+ edda/migrations/mysql/20251217000000_initial_schema.sql,sha256=LpINasESRhadOeqABwDk4JZ0OZ4_zQw_opnhIR4Xe9U,12367
+ edda/migrations/postgresql/20251217000000_initial_schema.sql,sha256=hCaGMWeptpzpnsjfNKVsMYuwPRe__fK9E0VZpClAumQ,11732
+ edda/migrations/sqlite/20251217000000_initial_schema.sql,sha256=Wq9gCnQ0K9SOt0PY_8f1MG4va8rLVWIIcf2lnRzSK5g,11906
+ edda_framework-0.12.0.dist-info/METADATA,sha256=5OyvWeuFkn7twSOODq7hUSzile0aMvOSgDhXshEsPF0,37567
+ edda_framework-0.12.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ edda_framework-0.12.0.dist-info/entry_points.txt,sha256=dPH47s6UoJgUZxHoeSMqZsQkLaSE-SGLi-gh88k2WrU,48
+ edda_framework-0.12.0.dist-info/licenses/LICENSE,sha256=udxb-V7_cYKTHqW7lNm48rxJ-Zpf0WAY_PyGDK9BPCo,1069
+ edda_framework-0.12.0.dist-info/RECORD,,