planar 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- planar/.__init__.py.un~ +0 -0
- planar/._version.py.un~ +0 -0
- planar/.app.py.un~ +0 -0
- planar/.cli.py.un~ +0 -0
- planar/.config.py.un~ +0 -0
- planar/.context.py.un~ +0 -0
- planar/.db.py.un~ +0 -0
- planar/.di.py.un~ +0 -0
- planar/.engine.py.un~ +0 -0
- planar/.files.py.un~ +0 -0
- planar/.log_context.py.un~ +0 -0
- planar/.log_metadata.py.un~ +0 -0
- planar/.logging.py.un~ +0 -0
- planar/.object_registry.py.un~ +0 -0
- planar/.otel.py.un~ +0 -0
- planar/.server.py.un~ +0 -0
- planar/.session.py.un~ +0 -0
- planar/.sqlalchemy.py.un~ +0 -0
- planar/.task_local.py.un~ +0 -0
- planar/.test_app.py.un~ +0 -0
- planar/.test_config.py.un~ +0 -0
- planar/.test_object_config.py.un~ +0 -0
- planar/.test_sqlalchemy.py.un~ +0 -0
- planar/.test_utils.py.un~ +0 -0
- planar/.util.py.un~ +0 -0
- planar/.utils.py.un~ +0 -0
- planar/__init__.py +26 -0
- planar/_version.py +1 -0
- planar/ai/.__init__.py.un~ +0 -0
- planar/ai/._models.py.un~ +0 -0
- planar/ai/.agent.py.un~ +0 -0
- planar/ai/.agent_utils.py.un~ +0 -0
- planar/ai/.events.py.un~ +0 -0
- planar/ai/.files.py.un~ +0 -0
- planar/ai/.models.py.un~ +0 -0
- planar/ai/.providers.py.un~ +0 -0
- planar/ai/.pydantic_ai.py.un~ +0 -0
- planar/ai/.pydantic_ai_agent.py.un~ +0 -0
- planar/ai/.pydantic_ai_provider.py.un~ +0 -0
- planar/ai/.step.py.un~ +0 -0
- planar/ai/.test_agent.py.un~ +0 -0
- planar/ai/.test_agent_serialization.py.un~ +0 -0
- planar/ai/.test_providers.py.un~ +0 -0
- planar/ai/.utils.py.un~ +0 -0
- planar/ai/__init__.py +15 -0
- planar/ai/agent.py +457 -0
- planar/ai/agent_utils.py +205 -0
- planar/ai/models.py +140 -0
- planar/ai/providers.py +1088 -0
- planar/ai/test_agent.py +1298 -0
- planar/ai/test_agent_serialization.py +229 -0
- planar/ai/test_providers.py +463 -0
- planar/ai/utils.py +102 -0
- planar/app.py +494 -0
- planar/cli.py +282 -0
- planar/config.py +544 -0
- planar/db/.db.py.un~ +0 -0
- planar/db/__init__.py +17 -0
- planar/db/alembic/env.py +136 -0
- planar/db/alembic/script.py.mako +28 -0
- planar/db/alembic/versions/3476068c153c_initial_system_tables_migration.py +339 -0
- planar/db/alembic.ini +128 -0
- planar/db/db.py +318 -0
- planar/files/.config.py.un~ +0 -0
- planar/files/.local.py.un~ +0 -0
- planar/files/.local_filesystem.py.un~ +0 -0
- planar/files/.model.py.un~ +0 -0
- planar/files/.models.py.un~ +0 -0
- planar/files/.s3.py.un~ +0 -0
- planar/files/.storage.py.un~ +0 -0
- planar/files/.test_files.py.un~ +0 -0
- planar/files/__init__.py +2 -0
- planar/files/models.py +162 -0
- planar/files/storage/.__init__.py.un~ +0 -0
- planar/files/storage/.base.py.un~ +0 -0
- planar/files/storage/.config.py.un~ +0 -0
- planar/files/storage/.context.py.un~ +0 -0
- planar/files/storage/.local_directory.py.un~ +0 -0
- planar/files/storage/.test_local_directory.py.un~ +0 -0
- planar/files/storage/.test_s3.py.un~ +0 -0
- planar/files/storage/base.py +61 -0
- planar/files/storage/config.py +44 -0
- planar/files/storage/context.py +15 -0
- planar/files/storage/local_directory.py +188 -0
- planar/files/storage/s3.py +220 -0
- planar/files/storage/test_local_directory.py +162 -0
- planar/files/storage/test_s3.py +299 -0
- planar/files/test_files.py +283 -0
- planar/human/.human.py.un~ +0 -0
- planar/human/.test_human.py.un~ +0 -0
- planar/human/__init__.py +2 -0
- planar/human/human.py +458 -0
- planar/human/models.py +80 -0
- planar/human/test_human.py +385 -0
- planar/logging/.__init__.py.un~ +0 -0
- planar/logging/.attributes.py.un~ +0 -0
- planar/logging/.formatter.py.un~ +0 -0
- planar/logging/.logger.py.un~ +0 -0
- planar/logging/.otel.py.un~ +0 -0
- planar/logging/.tracer.py.un~ +0 -0
- planar/logging/__init__.py +10 -0
- planar/logging/attributes.py +54 -0
- planar/logging/context.py +14 -0
- planar/logging/formatter.py +113 -0
- planar/logging/logger.py +114 -0
- planar/logging/otel.py +51 -0
- planar/modeling/.mixin.py.un~ +0 -0
- planar/modeling/.storage.py.un~ +0 -0
- planar/modeling/__init__.py +0 -0
- planar/modeling/field_helpers.py +59 -0
- planar/modeling/json_schema_generator.py +94 -0
- planar/modeling/mixins/__init__.py +10 -0
- planar/modeling/mixins/auditable.py +52 -0
- planar/modeling/mixins/test_auditable.py +97 -0
- planar/modeling/mixins/test_timestamp.py +134 -0
- planar/modeling/mixins/test_uuid_primary_key.py +52 -0
- planar/modeling/mixins/timestamp.py +53 -0
- planar/modeling/mixins/uuid_primary_key.py +19 -0
- planar/modeling/orm/.planar_base_model.py.un~ +0 -0
- planar/modeling/orm/__init__.py +18 -0
- planar/modeling/orm/planar_base_entity.py +29 -0
- planar/modeling/orm/query_filter_builder.py +122 -0
- planar/modeling/orm/reexports.py +15 -0
- planar/object_config/.object_config.py.un~ +0 -0
- planar/object_config/__init__.py +11 -0
- planar/object_config/models.py +114 -0
- planar/object_config/object_config.py +378 -0
- planar/object_registry.py +100 -0
- planar/registry_items.py +65 -0
- planar/routers/.__init__.py.un~ +0 -0
- planar/routers/.agents_router.py.un~ +0 -0
- planar/routers/.crud.py.un~ +0 -0
- planar/routers/.decision.py.un~ +0 -0
- planar/routers/.event.py.un~ +0 -0
- planar/routers/.file_attachment.py.un~ +0 -0
- planar/routers/.files.py.un~ +0 -0
- planar/routers/.files_router.py.un~ +0 -0
- planar/routers/.human.py.un~ +0 -0
- planar/routers/.info.py.un~ +0 -0
- planar/routers/.models.py.un~ +0 -0
- planar/routers/.object_config_router.py.un~ +0 -0
- planar/routers/.rule.py.un~ +0 -0
- planar/routers/.test_object_config_router.py.un~ +0 -0
- planar/routers/.test_workflow_router.py.un~ +0 -0
- planar/routers/.workflow.py.un~ +0 -0
- planar/routers/__init__.py +13 -0
- planar/routers/agents_router.py +197 -0
- planar/routers/entity_router.py +143 -0
- planar/routers/event.py +91 -0
- planar/routers/files.py +142 -0
- planar/routers/human.py +151 -0
- planar/routers/info.py +131 -0
- planar/routers/models.py +170 -0
- planar/routers/object_config_router.py +133 -0
- planar/routers/rule.py +108 -0
- planar/routers/test_agents_router.py +174 -0
- planar/routers/test_object_config_router.py +367 -0
- planar/routers/test_routes_security.py +169 -0
- planar/routers/test_rule_router.py +470 -0
- planar/routers/test_workflow_router.py +274 -0
- planar/routers/workflow.py +468 -0
- planar/rules/.decorator.py.un~ +0 -0
- planar/rules/.runner.py.un~ +0 -0
- planar/rules/.test_rules.py.un~ +0 -0
- planar/rules/__init__.py +23 -0
- planar/rules/decorator.py +184 -0
- planar/rules/models.py +355 -0
- planar/rules/rule_configuration.py +191 -0
- planar/rules/runner.py +64 -0
- planar/rules/test_rules.py +750 -0
- planar/scaffold_templates/app/__init__.py.j2 +0 -0
- planar/scaffold_templates/app/db/entities.py.j2 +11 -0
- planar/scaffold_templates/app/flows/process_invoice.py.j2 +67 -0
- planar/scaffold_templates/main.py.j2 +13 -0
- planar/scaffold_templates/planar.dev.yaml.j2 +34 -0
- planar/scaffold_templates/planar.prod.yaml.j2 +28 -0
- planar/scaffold_templates/pyproject.toml.j2 +10 -0
- planar/security/.jwt_middleware.py.un~ +0 -0
- planar/security/auth_context.py +148 -0
- planar/security/authorization.py +388 -0
- planar/security/default_policies.cedar +77 -0
- planar/security/jwt_middleware.py +116 -0
- planar/security/security_context.py +18 -0
- planar/security/tests/test_authorization_context.py +78 -0
- planar/security/tests/test_cedar_basics.py +41 -0
- planar/security/tests/test_cedar_policies.py +158 -0
- planar/security/tests/test_jwt_principal_context.py +179 -0
- planar/session.py +40 -0
- planar/sse/.constants.py.un~ +0 -0
- planar/sse/.example.html.un~ +0 -0
- planar/sse/.hub.py.un~ +0 -0
- planar/sse/.model.py.un~ +0 -0
- planar/sse/.proxy.py.un~ +0 -0
- planar/sse/constants.py +1 -0
- planar/sse/example.html +126 -0
- planar/sse/hub.py +216 -0
- planar/sse/model.py +8 -0
- planar/sse/proxy.py +257 -0
- planar/task_local.py +37 -0
- planar/test_app.py +51 -0
- planar/test_cli.py +372 -0
- planar/test_config.py +512 -0
- planar/test_object_config.py +527 -0
- planar/test_object_registry.py +14 -0
- planar/test_sqlalchemy.py +158 -0
- planar/test_utils.py +105 -0
- planar/testing/.client.py.un~ +0 -0
- planar/testing/.memory_storage.py.un~ +0 -0
- planar/testing/.planar_test_client.py.un~ +0 -0
- planar/testing/.predictable_tracer.py.un~ +0 -0
- planar/testing/.synchronizable_tracer.py.un~ +0 -0
- planar/testing/.test_memory_storage.py.un~ +0 -0
- planar/testing/.workflow_observer.py.un~ +0 -0
- planar/testing/__init__.py +0 -0
- planar/testing/memory_storage.py +78 -0
- planar/testing/planar_test_client.py +54 -0
- planar/testing/synchronizable_tracer.py +153 -0
- planar/testing/test_memory_storage.py +143 -0
- planar/testing/workflow_observer.py +73 -0
- planar/utils.py +70 -0
- planar/workflows/.__init__.py.un~ +0 -0
- planar/workflows/.builtin_steps.py.un~ +0 -0
- planar/workflows/.concurrency_tracing.py.un~ +0 -0
- planar/workflows/.context.py.un~ +0 -0
- planar/workflows/.contrib.py.un~ +0 -0
- planar/workflows/.decorators.py.un~ +0 -0
- planar/workflows/.durable_test.py.un~ +0 -0
- planar/workflows/.errors.py.un~ +0 -0
- planar/workflows/.events.py.un~ +0 -0
- planar/workflows/.exceptions.py.un~ +0 -0
- planar/workflows/.execution.py.un~ +0 -0
- planar/workflows/.human.py.un~ +0 -0
- planar/workflows/.lock.py.un~ +0 -0
- planar/workflows/.misc.py.un~ +0 -0
- planar/workflows/.model.py.un~ +0 -0
- planar/workflows/.models.py.un~ +0 -0
- planar/workflows/.notifications.py.un~ +0 -0
- planar/workflows/.orchestrator.py.un~ +0 -0
- planar/workflows/.runtime.py.un~ +0 -0
- planar/workflows/.serialization.py.un~ +0 -0
- planar/workflows/.step.py.un~ +0 -0
- planar/workflows/.step_core.py.un~ +0 -0
- planar/workflows/.sub_workflow_runner.py.un~ +0 -0
- planar/workflows/.sub_workflow_scheduler.py.un~ +0 -0
- planar/workflows/.test_concurrency.py.un~ +0 -0
- planar/workflows/.test_concurrency_detection.py.un~ +0 -0
- planar/workflows/.test_human.py.un~ +0 -0
- planar/workflows/.test_lock_timeout.py.un~ +0 -0
- planar/workflows/.test_orchestrator.py.un~ +0 -0
- planar/workflows/.test_race_conditions.py.un~ +0 -0
- planar/workflows/.test_serialization.py.un~ +0 -0
- planar/workflows/.test_suspend_deserialization.py.un~ +0 -0
- planar/workflows/.test_workflow.py.un~ +0 -0
- planar/workflows/.tracing.py.un~ +0 -0
- planar/workflows/.types.py.un~ +0 -0
- planar/workflows/.util.py.un~ +0 -0
- planar/workflows/.utils.py.un~ +0 -0
- planar/workflows/.workflow.py.un~ +0 -0
- planar/workflows/.workflow_wrapper.py.un~ +0 -0
- planar/workflows/.wrappers.py.un~ +0 -0
- planar/workflows/__init__.py +42 -0
- planar/workflows/context.py +44 -0
- planar/workflows/contrib.py +190 -0
- planar/workflows/decorators.py +217 -0
- planar/workflows/events.py +185 -0
- planar/workflows/exceptions.py +34 -0
- planar/workflows/execution.py +198 -0
- planar/workflows/lock.py +229 -0
- planar/workflows/misc.py +5 -0
- planar/workflows/models.py +154 -0
- planar/workflows/notifications.py +96 -0
- planar/workflows/orchestrator.py +383 -0
- planar/workflows/query.py +256 -0
- planar/workflows/serialization.py +409 -0
- planar/workflows/step_core.py +373 -0
- planar/workflows/step_metadata.py +357 -0
- planar/workflows/step_testing_utils.py +86 -0
- planar/workflows/sub_workflow_runner.py +191 -0
- planar/workflows/test_concurrency_detection.py +120 -0
- planar/workflows/test_lock_timeout.py +140 -0
- planar/workflows/test_serialization.py +1195 -0
- planar/workflows/test_suspend_deserialization.py +231 -0
- planar/workflows/test_workflow.py +1967 -0
- planar/workflows/tracing.py +106 -0
- planar/workflows/wrappers.py +41 -0
- planar-0.5.0.dist-info/METADATA +285 -0
- planar-0.5.0.dist-info/RECORD +289 -0
- planar-0.5.0.dist-info/WHEEL +4 -0
- planar-0.5.0.dist-info/entry_points.txt +3 -0
planar/db/db.py
ADDED
@@ -0,0 +1,318 @@
import asyncio
import re
from contextlib import asynccontextmanager
from pathlib import Path
from sqlite3 import LEGACY_TRANSACTION_CONTROL
from typing import Any, Callable, Coroutine, cast

from alembic import command
from alembic.config import Config as AlembicConfig
from pydantic import ConfigDict
from sqlalchemy import Connection, MetaData, event, make_url, text
from sqlalchemy.engine.url import URL
from sqlalchemy.exc import DBAPIError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import declared_attr
from sqlalchemy.sql.expression import ClauseElement, Executable
from sqlmodel import SQLModel
from sqlmodel.ext.asyncio.session import AsyncSession

import planar
from planar.logging import get_logger
from planar.modeling.orm.planar_base_entity import PLANAR_APPLICATION_METADATA
from planar.utils import P, R, T, U, exponential_backoff_with_jitter


def camel_to_snake(name):
    name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower()


PLANAR_SCHEMA = "planar"
PLANAR_FRAMEWORK_METADATA = MetaData(schema=PLANAR_SCHEMA)
logger = get_logger(__name__)


class explain(Executable, ClauseElement):
    inherit_cache = False

    def __init__(self, stmt):
        self.statement = stmt
        self._inline = False


@compiles(explain, "postgresql")
def pg_explain(element, compiler, **kw):
    text = "EXPLAIN ANALYZE "
    text += compiler.process(element.statement, **kw)
    return text


class PlanarInternalBase(SQLModel, table=False):
    """
    Base model with common fields for all database tables.
    Not a table itself - meant to be inherited by concrete model classes.

    Usage conventions:
    - Primary keys should be "id" and be UUID with default_factory=uuid4 when possible
    - Use TimeStampMixin for auto-timestamp fields
    - Field names should use snake_case consistently
    - Table schema is set to 'planar' automatically
    - Foreign keys should specify the full schema.table_name
    """

    @declared_attr.directive
    def __tablename__(cls) -> str:  # type: ignore
        return camel_to_snake(cls.__name__)

    __abstract__ = True
    # __table_args__ = {"schema": PLANAR_SCHEMA}
    metadata = PLANAR_FRAMEWORK_METADATA
    model_config = ConfigDict(validate_assignment=True)  # type: ignore


class PlanarSession(AsyncSession):
    def __init__(self, engine: AsyncEngine | None = None):
        assert engine
        self.engine = engine
        self.dialect = engine.dialect
        self.max_conflict_retries: int = 10
        # dynamic import since planar.session depends on this
        from planar.session import config_var

        config = config_var.get(None)
        if config is not None and config.app.max_db_conflict_retries:
            self.max_conflict_retries = config.app.max_db_conflict_retries
        super().__init__(engine, expire_on_commit=False)

    async def set_serializable_isolation(self):
        if self.dialect.name == "postgresql":
            await self.exec(text("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE"))  # type: ignore[arg-type]

    @asynccontextmanager
    async def begin_read(self):
        """Context manager for read-only transactions.

        This is useful when reading from the database since it ensures that if
        a transaction has not started before the context, it will ensure no
        transactions are open after the context.
        """
        in_transaction = self.in_transaction()
        try:
            yield
            if not in_transaction:
                await self.commit()
        except Exception:
            if not in_transaction:
                await self.rollback()
            raise

    async def run_transaction(
        self,
        fn: Callable[P, Coroutine[T, U, R]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> R:
        max_conflict_retries = self.max_conflict_retries

        if self.in_transaction():
            await self.commit()

        remaining_retries = max_conflict_retries

        while True:
            try:
                async with self.begin():
                    await self.set_serializable_isolation()
                    return await fn(*args, **kwargs)
            except Exception as e:
                if remaining_retries == 0:
                    logger.exception("transaction failed after maximum retries")
                    raise

                if isinstance(e, DBAPIError) and "could not serialize access" in str(e):
                    delay = exponential_backoff_with_jitter(
                        max_conflict_retries - remaining_retries
                    )
                    await asyncio.sleep(delay)
                    remaining_retries -= 1
                    continue
                logger.exception(
                    "transaction failed due to unrecoverable error",
                    remaining_retries=remaining_retries,
                )
                raise

    async def explain(self, query: Executable, log_identifier: str) -> str:
        if self.dialect.name != "postgresql" or not logger.isDebugEnabled():
            return ""
        # Reusing the current session will mess things up
        # (implicit transaction starting, for example), so use
        # a separate session to run the explain command
        async with PlanarSession(self.engine) as session:
            result = await session.exec(cast(Any, explain(query)))
            query_plan = "\n".join([str(row[0]) for row in result])
            compiled_sql = str(query)
            logger.debug(
                f"query_plan:{log_identifier}",
                query_plan=query_plan,
                compiled_sql=compiled_sql,
            )
            return query_plan


def new_session(engine: AsyncEngine) -> PlanarSession:
    return PlanarSession(engine)


class DatabaseManager:
    def __init__(
        self,
        db_url: str | URL,
    ):
        self.db_url = make_url(db_url) if isinstance(db_url, str) else db_url
        self.engine: AsyncEngine | None = None

    def _create_sqlite_engine(self, url: URL) -> AsyncEngine:
        # in practice this high timeout is only use
        timeout = int(str(url.query.get("timeout", 10)))

        engine = create_async_engine(
            url,
            connect_args=dict(
                timeout=timeout,
                isolation_level=None,
                # If autocommit is not LEGACY_TRANSACTION_CONTROL, isolation_level
                # is ignored, so we set here explicitly to make the intention clear,
                # even though it is the default value.
                autocommit=LEGACY_TRANSACTION_CONTROL,
            ),
            # SQLite doesn't support schemas, so we need to translate the planar schema
            # name to None.
            execution_options={"schema_translate_map": {"planar": None}},
        )

        def do_begin(conn: Connection):
            conn.exec_driver_sql("BEGIN IMMEDIATE")

        event.listen(engine.sync_engine, "begin", do_begin)

        return engine

    def _create_postgresql_engine(self, url: URL) -> AsyncEngine:
        engine = create_async_engine(url)

        return engine

    def connect(self):
        """Creates and initializes the database engine."""
        if self.engine:
            logger.warning("database engine already initialized")
            return

        db_backend = self.db_url.get_backend_name()

        match db_backend:
            case "sqlite":
                logger.info(
                    "connecting to database", db_backend=db_backend, db_url=self.db_url
                )
                self.engine = self._create_sqlite_engine(self.db_url)
            case "postgresql":
                logger.info("connecting to database", db_backend=db_backend)
                self.engine = self._create_postgresql_engine(self.db_url)
            case _:
                raise NotImplementedError(
                    f'Unsupported database backend "{db_backend}"'
                )

    async def disconnect(self):
        """Disposes of the database engine."""
        if self.engine:
            logger.info("disconnecting database engine")
            await self.engine.dispose()
            self.engine = None
        else:
            logger.warning("attempted to disconnect an uninitialized engine")

    def get_engine(self) -> AsyncEngine:
        """Returns the initialized AsyncEngine."""
        if not self.engine:
            raise RuntimeError("Database engine not initialized. Call connect() first.")
        return self.engine

    def get_session(self) -> PlanarSession:
        """Returns a new PlanarSession."""
        if not self.engine:
            raise RuntimeError("Database engine not initialized. Call connect() first.")
        return PlanarSession(self.engine)

    async def _run_system_migrations(self):
        logger.info("running planar system migrations")

        module_path = Path(planar.__file__).parent
        script_location = str(module_path / "db" / "alembic")

        alembic_cfg = AlembicConfig()
        alembic_cfg.set_main_option("script_location", script_location)

        if not self.engine:
            raise RuntimeError("Database engine not initialized. Call connect() first.")
        try:
            async with self.engine.begin() as conn:
                # Pass the *synchronous* connection produced by `run_sync` to Alembic.

                def _upgrade(sync_conn):
                    """Run Alembic upgrade using the given synchronous connection."""

                    # Inject the sync SQLAlchemy Connection so that planar/db/alembic/env.py
                    # recognises we're running in programmatic (runtime) mode instead of
                    # development mode. This prevents it from trying to create a new engine
                    # via `engine_from_config`, which expects a URL in the Alembic config.
                    alembic_cfg.attributes["connection"] = sync_conn

                    # Execute migrations up to the latest revision.
                    command.upgrade(alembic_cfg, "head")

                # Execute the upgrade inside the green-thread aware sync context.
                await conn.run_sync(_upgrade)
            logger.info("planar system migrations completed successfully")
        except Exception:
            logger.exception("planar system migration failed")
            raise

    async def _setup_database(self):
        if not self.engine:
            raise RuntimeError("Database engine not initialized. Call connect() first.")

        async with self.engine.begin() as conn:
            if "sqlite" in self.db_url.drivername:
                await conn.execute(text("PRAGMA foreign_keys=ON"))
            else:
                # Ensure planar schema exists
                await conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {PLANAR_SCHEMA}"))

    async def migrate(self, use_alembic: bool):
        """
        Runs database migrations.
        By default, uses SQLModel.metadata.create_all.
        Set use_alembic=True to use Alembic (requires Alembic setup).
        """
        if not self.engine:
            raise RuntimeError("Database engine not initialized. Call connect() first.")

        logger.info("starting database migration")
        if use_alembic:
            logger.info("using alembic for migrations")
            await self._setup_database()
            await self._run_system_migrations()
            # For now user migrations are not supported, so we fall back to SQLModel.metadata.create_all
            async with self.engine.begin() as conn:
                await conn.run_sync(PLANAR_APPLICATION_METADATA.create_all)

        else:
            async with self.engine.begin() as conn:
                await self._setup_database()
                await conn.run_sync(PLANAR_FRAMEWORK_METADATA.create_all)
                await conn.run_sync(PLANAR_APPLICATION_METADATA.create_all)
planar/files/.s3.py.un~
ADDED
Binary file
planar/files/__init__.py
ADDED
planar/files/models.py
ADDED
@@ -0,0 +1,162 @@
import mimetypes
import os
from pathlib import Path
from typing import AsyncGenerator, Union
from uuid import UUID, uuid4

import aiofiles
from pydantic import BaseModel
from sqlmodel import Field

from planar.db import PlanarInternalBase
from planar.files.storage.context import get_storage
from planar.logging import get_logger
from planar.modeling.mixins import TimestampMixin
from planar.session import get_session

logger = get_logger(__name__)


class PlanarFile(BaseModel):
    id: UUID
    filename: str
    content_type: str
    size: int

    async def get_metadata(self) -> "PlanarFileMetadata":
        """
        Retrieves the metadata for this file from the database.
        """
        logger.debug("getting metadata for file", file_id=self.id)
        session = get_session()
        async with session.begin_read():
            result = await session.get(PlanarFileMetadata, self.id)
            if result is None:
                logger.warning("file metadata not found in database", file_id=self.id)
                raise ValueError(f"File with ID {self.id} not found in the database.")
            return result

    async def get_content(self) -> bytes:
        """
        Retrieves the content of this file from the storage backend.
        """
        logger.debug("getting content for file", file_id=self.id)
        storage = get_storage()
        metadata = await self.get_metadata()
        data, _ = await storage.get_bytes(metadata.storage_ref)
        return data

    @staticmethod
    async def upload(
        content: Union[bytes, AsyncGenerator[bytes, None], Path, str],
        filename: str,
        content_type: str | None = None,
        size: int | None = None,
    ) -> "PlanarFile":
        """
        Uploads file content to storage and creates its metadata record.

        Args:
            content: File content as bytes, an async iterator, or a file path (str or Path).
            filename: The desired filename for storage and metadata.
            content_type: The MIME type of the file. If None, it's inferred from the filename
                for paths or defaults to 'application/octet-stream'.
            size: The size of the file in bytes. If None, it's calculated for bytes/paths
                or defaults to -1 for streams.

        Returns:
            The created PlanarFile object with metadata.

        Raises:
            FileNotFoundError: If content is a path and the file doesn't exist.
            TypeError: If the content type is not supported.
        """
        logger.debug(
            "uploading file",
            filename=filename,
            content_type=content_type,
            size=size,
        )
        storage = get_storage()
        session = get_session()

        storage_ref: str
        actual_size: int = -1
        final_content_type: str = content_type or "application/octet-stream"

        if isinstance(content, (str, Path)):
            file_path = Path(content)
            logger.debug("uploading from path", path=file_path)
            if not file_path.is_file():
                logger.warning("file not found at path for upload", path=file_path)
                raise FileNotFoundError(f"File not found at path: {file_path}")

            actual_size = size if size is not None else os.path.getsize(file_path)

            if content_type is None:
                guessed_type, _ = mimetypes.guess_type(filename)
                final_content_type = guessed_type or "application/octet-stream"

            async def file_stream():
                async with aiofiles.open(file_path, "rb") as afp:
                    chunk_size = 65536  # 64KB chunk size
                    while chunk := await afp.read(chunk_size):
                        yield chunk

            storage_ref = await storage.put(
                stream=file_stream(), mime_type=final_content_type
            )

        elif isinstance(content, bytes):
            logger.debug("uploading from bytes")
            actual_size = size if size is not None else len(content)
            # Keep provided content_type or default
            final_content_type = content_type or "application/octet-stream"
            storage_ref = await storage.put_bytes(content, mime_type=final_content_type)

        elif isinstance(content, AsyncGenerator):  # Check for async iterator
            logger.debug("uploading from async generator stream")
            actual_size = size if size is not None else -1  # Size required or unknown
            # Keep provided content_type or default
            final_content_type = content_type or "application/octet-stream"
            storage_ref = await storage.put(
                stream=content, mime_type=final_content_type
            )
        else:
            logger.warning(
                "unsupported content type for upload", content_type=type(content)
            )
            raise TypeError(
                "Unsupported content type. Must be bytes, AsyncGenerator, str path, or Path object."
            )

        # Create the metadata record
        planar_file_metadata = PlanarFileMetadata(
            filename=filename,
            content_type=final_content_type,
            size=actual_size,
            storage_ref=storage_ref,
        )
        session.add(planar_file_metadata)
        await session.commit()
        await session.refresh(planar_file_metadata)
        logger.info(
            "file uploaded and metadata created",
            id=planar_file_metadata.id,
            filename=filename,
            storage_ref=storage_ref,
        )

        # We return the metadata instance which also satisfies the PlanarFile structure
        return planar_file_metadata


class PlanarFileMetadata(PlanarFile, TimestampMixin, PlanarInternalBase, table=True):
    """
    Database model storing the authoritative mapping between a PlanarFile.file_id
    and its storage details. Acts as the single, central file manifest.
    """

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    # storage_ref is a storage backend specific identifier for the file
    storage_ref: str = Field(index=True)
planar/files/storage/base.py
ADDED
@@ -0,0 +1,61 @@
import io
from abc import ABC, abstractmethod
from typing import AsyncGenerator


class Storage(ABC):
    @abstractmethod
    async def put(
        self, stream: AsyncGenerator[bytes, None], mime_type: str | None = None
    ) -> str:
        """Store a stream and its mime type, returning a storage ref."""
        ...

    @abstractmethod
    async def get(self, ref: str) -> tuple[AsyncGenerator[bytes, None], str | None]:
        """Get a stream and its mime type from a storage ref."""
        ...

    @abstractmethod
    async def delete(self, ref: str) -> None:
        """Delete the object associated with the storage ref."""
        ...

    @abstractmethod
    async def external_url(self, ref: str) -> str | None:
        """If available, return an external URL to read the file."""
        ...

    async def put_bytes(self, data: bytes, mime_type: str | None = None) -> str:
        """Store bytes and optional mime type, returning a storage ref."""

        async def _stream():
            yield data

        return await self.put(_stream(), mime_type=mime_type)

    async def get_bytes(self, ref: str) -> tuple[bytes, str | None]:
        """Get bytes and mime type from a storage ref."""
        buffer = io.BytesIO()
        stream, mime_type = await self.get(ref)
        async for chunk in stream:
            buffer.write(chunk)
        return buffer.getvalue(), mime_type

    async def put_string(
        self, data: str, encoding: str = "utf-8", mime_type: str | None = None
    ) -> str:
        """Store a string and optional mime type, returning a storage ref."""
        # Ensure mime_type includes encoding if not already specified
        final_mime_type = mime_type
        if mime_type and "charset=" not in mime_type and mime_type.startswith("text/"):
            final_mime_type = f"{mime_type}; charset={encoding}"
        return await self.put_bytes(data.encode(encoding), mime_type=final_mime_type)

    async def get_string(
        self, ref: str, encoding: str = "utf-8"
    ) -> tuple[str, str | None]:
        """Get a string and mime type from a storage ref."""
        data_bytes, mime_type = await self.get_bytes(ref)
        # TODO: Potentially use encoding from mime_type if available?
        return data_bytes.decode(encoding), mime_type
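To make the contract above concrete, here is a toy in-memory implementation of the Storage ABC. It is only a sketch for illustration; the package ships its own test-oriented backend in planar/testing/memory_storage.py, which this does not reproduce:

import uuid
from typing import AsyncGenerator

from planar.files.storage.base import Storage


class InMemoryStorage(Storage):
    """Toy Storage backend keeping blobs in a dict keyed by a generated ref."""

    def __init__(self) -> None:
        self._blobs: dict[str, tuple[bytes, str | None]] = {}

    async def put(
        self, stream: AsyncGenerator[bytes, None], mime_type: str | None = None
    ) -> str:
        data = b""
        async for chunk in stream:
            data += chunk
        ref = uuid.uuid4().hex
        self._blobs[ref] = (data, mime_type)
        return ref

    async def get(self, ref: str) -> tuple[AsyncGenerator[bytes, None], str | None]:
        data, mime_type = self._blobs[ref]

        async def _stream() -> AsyncGenerator[bytes, None]:
            yield data

        return _stream(), mime_type

    async def delete(self, ref: str) -> None:
        self._blobs.pop(ref, None)

    async def external_url(self, ref: str) -> str | None:
        return None  # no externally reachable URL for in-process storage

With an implementation like this in place, the inherited helpers (put_bytes, get_bytes, put_string, get_string) work unchanged, since they are written purely in terms of put and get.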
planar/files/storage/config.py
ADDED
@@ -0,0 +1,44 @@
from typing import Annotated, Literal, Optional

from pydantic import BaseModel, Field

from .local_directory import LocalDirectoryStorage
from .s3 import S3Storage


class LocalDirectoryConfig(BaseModel):
    backend: Literal["localdir"]
    directory: str


class S3Config(BaseModel):
    backend: Literal["s3"]
    bucket_name: str
    region: str
    access_key: Optional[str] = None
    secret_key: Optional[str] = None
    endpoint_url: Optional[str] = None
    presigned_url_ttl: int = 3600


StorageConfig = Annotated[
    LocalDirectoryConfig | S3Config,
    Field(discriminator="backend"),
]


def create_from_config(config: StorageConfig) -> LocalDirectoryStorage | S3Storage:
    """Creates a storage instance from the given configuration."""
    if config.backend == "localdir":
        return LocalDirectoryStorage(config.directory)
    elif config.backend == "s3":
        return S3Storage(
            bucket_name=config.bucket_name,
            region=config.region,
            access_key_id=config.access_key,
            secret_access_key=config.secret_key,
            endpoint_url=config.endpoint_url,
            presigned_url_ttl=config.presigned_url_ttl,
        )
    else:
        raise ValueError(f"Unsupported backend: {config.backend}")
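A small hedged example of driving create_from_config with the discriminated union above; the directory, bucket, and endpoint values are placeholders, and the S3 branch assumes whatever dependencies planar/files/storage/s3.py needs are installed:

from planar.files.storage.config import (
    LocalDirectoryConfig,
    S3Config,
    create_from_config,
)

# Local filesystem backend rooted at a placeholder directory.
local_storage = create_from_config(
    LocalDirectoryConfig(backend="localdir", directory="/tmp/planar-files")
)

# S3-compatible backend; endpoint_url is optional (shown here as it might be for MinIO).
s3_storage = create_from_config(
    S3Config(
        backend="s3",
        bucket_name="my-bucket",
        region="us-east-1",
        endpoint_url="http://localhost:9000",
    )
)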
planar/files/storage/context.py
ADDED
@@ -0,0 +1,15 @@
from contextvars import ContextVar

from planar.files.storage.base import Storage

storage_var: ContextVar[Storage] = ContextVar("storage")


def get_storage() -> Storage:
    """Get the current storage context."""
    return storage_var.get()


def set_storage(storage: Storage) -> None:
    """Set the current storage context."""
    storage_var.set(storage)