surrealdb-orm 0.1.4-py3-none-any.whl → 0.5.0-py3-none-any.whl

This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
Files changed (50)
  1. surreal_orm/__init__.py +72 -3
  2. surreal_orm/aggregations.py +164 -0
  3. surreal_orm/auth/__init__.py +15 -0
  4. surreal_orm/auth/access.py +167 -0
  5. surreal_orm/auth/mixins.py +302 -0
  6. surreal_orm/cli/__init__.py +15 -0
  7. surreal_orm/cli/commands.py +369 -0
  8. surreal_orm/connection_manager.py +58 -18
  9. surreal_orm/fields/__init__.py +36 -0
  10. surreal_orm/fields/encrypted.py +166 -0
  11. surreal_orm/fields/relation.py +465 -0
  12. surreal_orm/migrations/__init__.py +51 -0
  13. surreal_orm/migrations/executor.py +380 -0
  14. surreal_orm/migrations/generator.py +272 -0
  15. surreal_orm/migrations/introspector.py +305 -0
  16. surreal_orm/migrations/migration.py +188 -0
  17. surreal_orm/migrations/operations.py +531 -0
  18. surreal_orm/migrations/state.py +406 -0
  19. surreal_orm/model_base.py +530 -44
  20. surreal_orm/query_set.py +609 -33
  21. surreal_orm/relations.py +645 -0
  22. surreal_orm/surreal_function.py +95 -0
  23. surreal_orm/surreal_ql.py +113 -0
  24. surreal_orm/types.py +86 -0
  25. surreal_sdk/README.md +79 -0
  26. surreal_sdk/__init__.py +151 -0
  27. surreal_sdk/connection/__init__.py +17 -0
  28. surreal_sdk/connection/base.py +516 -0
  29. surreal_sdk/connection/http.py +421 -0
  30. surreal_sdk/connection/pool.py +244 -0
  31. surreal_sdk/connection/websocket.py +519 -0
  32. surreal_sdk/exceptions.py +71 -0
  33. surreal_sdk/functions.py +607 -0
  34. surreal_sdk/protocol/__init__.py +13 -0
  35. surreal_sdk/protocol/rpc.py +218 -0
  36. surreal_sdk/py.typed +0 -0
  37. surreal_sdk/pyproject.toml +49 -0
  38. surreal_sdk/streaming/__init__.py +31 -0
  39. surreal_sdk/streaming/change_feed.py +278 -0
  40. surreal_sdk/streaming/live_query.py +265 -0
  41. surreal_sdk/streaming/live_select.py +369 -0
  42. surreal_sdk/transaction.py +386 -0
  43. surreal_sdk/types.py +346 -0
  44. surrealdb_orm-0.5.0.dist-info/METADATA +465 -0
  45. surrealdb_orm-0.5.0.dist-info/RECORD +52 -0
  46. {surrealdb_orm-0.1.4.dist-info → surrealdb_orm-0.5.0.dist-info}/WHEEL +1 -1
  47. surrealdb_orm-0.5.0.dist-info/entry_points.txt +2 -0
  48. {surrealdb_orm-0.1.4.dist-info → surrealdb_orm-0.5.0.dist-info}/licenses/LICENSE +1 -1
  49. surrealdb_orm-0.1.4.dist-info/METADATA +0 -184
  50. surrealdb_orm-0.1.4.dist-info/RECORD +0 -12
surreal_sdk/protocol/rpc.py ADDED
@@ -0,0 +1,218 @@
+ """
+ SurrealDB RPC Protocol Implementation.
+
+ Handles the JSON-RPC style messaging format used by SurrealDB.
+ """
+
+ from dataclasses import dataclass, field
+ from typing import Any
+ import json
+
+
+ @dataclass
+ class RPCRequest:
+     """
+     RPC Request message format.
+
+     Attributes:
+         id: Unique request identifier for response matching
+         method: RPC method name (query, select, create, etc.)
+         params: Method parameters as list or dict
+     """
+
+     method: str
+     params: list[Any] | dict[str, Any] = field(default_factory=list)
+     id: int = 1
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary for JSON serialization."""
+         return {
+             "id": self.id,
+             "method": self.method,
+             "params": self.params if isinstance(self.params, list) else [self.params],
+         }
+
+     def to_json(self) -> str:
+         """Serialize to JSON string."""
+         return json.dumps(self.to_dict())
+
+     @classmethod
+     def query(cls, sql: str, vars: dict[str, Any] | None = None, request_id: int = 1) -> "RPCRequest":
+         """Create a query request."""
+         return cls(method="query", params=[sql, vars or {}], id=request_id)
+
+     @classmethod
+     def select(cls, thing: str, request_id: int = 1) -> "RPCRequest":
+         """Create a select request."""
+         return cls(method="select", params=[thing], id=request_id)
+
+     @classmethod
+     def create(cls, thing: str, data: dict[str, Any], request_id: int = 1) -> "RPCRequest":
+         """Create a create request."""
+         return cls(method="create", params=[thing, data], id=request_id)
+
+     @classmethod
+     def update(cls, thing: str, data: dict[str, Any], request_id: int = 1) -> "RPCRequest":
+         """Create an update request."""
+         return cls(method="update", params=[thing, data], id=request_id)
+
+     @classmethod
+     def merge(cls, thing: str, data: dict[str, Any], request_id: int = 1) -> "RPCRequest":
+         """Create a merge request."""
+         return cls(method="merge", params=[thing, data], id=request_id)
+
+     @classmethod
+     def delete(cls, thing: str, request_id: int = 1) -> "RPCRequest":
+         """Create a delete request."""
+         return cls(method="delete", params=[thing], id=request_id)
+
+     @classmethod
+     def signin(
+         cls,
+         user: str | None = None,
+         password: str | None = None,
+         namespace: str | None = None,
+         database: str | None = None,
+         access: str | None = None,
+         request_id: int = 1,
+     ) -> "RPCRequest":
+         """Create a signin request."""
+         params: dict[str, Any] = {}
+         if user:
+             params["user"] = user
+         if password:
+             params["pass"] = password
+         if namespace:
+             params["ns"] = namespace
+         if database:
+             params["db"] = database
+         if access:
+             params["ac"] = access
+         return cls(method="signin", params=params, id=request_id)
+
+     @classmethod
+     def use(cls, namespace: str, database: str, request_id: int = 1) -> "RPCRequest":
+         """Create a use request."""
+         return cls(method="use", params=[namespace, database], id=request_id)
+
+     @classmethod
+     def live(cls, table: str, diff: bool = False, request_id: int = 1) -> "RPCRequest":
+         """Create a live query request."""
+         sql = f"LIVE SELECT * FROM {table}"
+         if diff:
+             sql += " DIFF"
+         return cls(method="query", params=[sql, {}], id=request_id)
+
+     @classmethod
+     def kill(cls, live_id: str, request_id: int = 1) -> "RPCRequest":
+         """Create a kill request for a live query."""
+         return cls(method="kill", params=[live_id], id=request_id)
+
+
+ @dataclass
+ class RPCError:
+     """
+     RPC Error format.
+
+     Attributes:
+         code: Error code
+         message: Error message
+     """
+
+     code: int
+     message: str
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> "RPCError":
+         """Create from dictionary."""
+         return cls(
+             code=data.get("code", -1),
+             message=data.get("message", "Unknown error"),
+         )
+
+
+ @dataclass
+ class RPCResponse:
+     """
+     RPC Response message format.
+
+     Attributes:
+         id: Request identifier this response matches
+         result: Query result data (if successful)
+         error: Error information (if failed)
+     """
+
+     id: int
+     result: Any = None
+     error: RPCError | None = None
+
+     @property
+     def is_error(self) -> bool:
+         """Check if response is an error."""
+         return self.error is not None
+
+     @property
+     def is_success(self) -> bool:
+         """Check if response is successful."""
+         return self.error is None
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> "RPCResponse":
+         """Parse from dictionary."""
+         error = None
+         if "error" in data:
+             error = RPCError.from_dict(data["error"])
+
+         return cls(
+             id=data.get("id", 0),
+             result=data.get("result"),
+             error=error,
+         )
+
+     @classmethod
+     def from_json(cls, json_str: str) -> "RPCResponse":
+         """Parse from JSON string."""
+         data = json.loads(json_str)
+         return cls.from_dict(data)
+
+
+ # RPC Method names as constants
+ class RPCMethod:
+     """RPC method name constants."""
+
+     # Authentication
+     SIGNIN = "signin"
+     SIGNUP = "signup"
+     AUTHENTICATE = "authenticate"
+     INVALIDATE = "invalidate"
+     INFO = "info"
+
+     # Connection
+     USE = "use"
+     PING = "ping"
+     VERSION = "version"
+     RESET = "reset"
+
+     # CRUD
+     SELECT = "select"
+     CREATE = "create"
+     INSERT = "insert"
+     UPDATE = "update"
+     UPSERT = "upsert"
+     MERGE = "merge"
+     PATCH = "patch"
+     DELETE = "delete"
+     RELATE = "relate"
+
+     # Query
+     QUERY = "query"
+     GRAPHQL = "graphql"
+     RUN = "run"
+
+     # Live Queries (WebSocket only)
+     LIVE = "live"
+     KILL = "kill"
+
+     # Variables (WebSocket only)
+     LET = "let"
+     UNSET = "unset"
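For context, the request/response classes above compose as follows. This is a minimal sketch based only on the code shown in this hunk; the module path surreal_sdk.protocol.rpc matches the file location, and the JSON literals are illustrative values, not captured server output.

    from surreal_sdk.protocol.rpc import RPCRequest, RPCResponse, RPCMethod

    # Build a parameterized query request; "id" is what matches the reply to this request.
    req = RPCRequest.query("SELECT * FROM user WHERE age > $min", {"min": 18}, request_id=7)
    payload = req.to_json()  # e.g. {"id": 7, "method": "query", "params": ["SELECT ...", {"min": 18}]}

    # Parse a raw response frame coming back from the server.
    resp = RPCResponse.from_json('{"id": 7, "result": [{"id": "user:1"}]}')
    if resp.is_success:
        rows = resp.result
    else:
        print(resp.error.code, resp.error.message)

    assert req.method == RPCMethod.QUERY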
surreal_sdk/py.typed ADDED
Empty file (PEP 561 type-information marker).
surreal_sdk/pyproject.toml ADDED
@@ -0,0 +1,49 @@
+ [project]
+ name = "surreal-sdk"
+ version = "0.5.0"
+ description = "Custom Python SDK for SurrealDB with HTTP and WebSocket support. No dependency on official surrealdb package."
+ readme = "README.md"
+ requires-python = ">=3.12"
+ license = {text = "MIT"}
+ authors = [
+     { name = "Yannick Croteau", email = "yannick.croteau@gmail.com" }
+ ]
+ classifiers = [
+     "Development Status :: 3 - Alpha",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
+     "Programming Language :: Python :: 3.14",
+     "License :: OSI Approved :: MIT License",
+     "Topic :: Database",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+     "Operating System :: OS Independent",
+     "Framework :: AsyncIO",
+ ]
+
+ dependencies = [
+     "httpx>=0.27.0",
+     "aiohttp>=3.9.0",
+ ]
+
+ [project.optional-dependencies]
+ # CBOR support for binary protocol
+ cbor = [
+     "cbor2>=5.6.0",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/EulogySnowfall/SurrealDB-ORM"
+ Documentation = "https://github.com/EulogySnowfall/SurrealDB-ORM"
+ Repository = "https://github.com/EulogySnowfall/SurrealDB-ORM.git"
+ Issues = "https://github.com/EulogySnowfall/SurrealDB-ORM/issues"
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["."]
+
+ [tool.hatch.build.targets.sdist]
+ include = ["*.py", "py.typed", "README.md"]
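Per the config above, the bundled SDK targets Python 3.12+ and keeps binary-protocol support behind the optional cbor extra (installed as "surreal-sdk[cbor]", assuming the sub-package is published or installed from a checkout). A small sketch of how consuming code might detect that extra at runtime, assuming nothing beyond the cbor2 dependency declared above:

    # Detect the optional CBOR extra without making it a hard requirement.
    try:
        import cbor2  # present only when the "cbor" extra was installed
        HAS_CBOR = True
    except ImportError:
        HAS_CBOR = False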
surreal_sdk/streaming/__init__.py ADDED
@@ -0,0 +1,31 @@
+ """
+ SurrealDB SDK Streaming Module.
+
+ Provides Live Queries and Change Feeds streaming capabilities.
+ """
+
+ from .change_feed import ChangeFeedStream
+ from .live_query import LiveQuery, LiveQueryManager, LiveNotification, LiveAction
+ from .live_select import (
+     LiveSelectStream,
+     LiveSelectManager,
+     LiveChange,
+     LiveAction as LiveSelectAction,
+     LiveSubscriptionParams,
+ )
+
+ __all__ = [
+     # Change Feeds
+     "ChangeFeedStream",
+     # Live Query (callback-based)
+     "LiveQuery",
+     "LiveQueryManager",
+     "LiveNotification",
+     "LiveAction",
+     # Live Select (async iterator)
+     "LiveSelectStream",
+     "LiveSelectManager",
+     "LiveChange",
+     "LiveSelectAction",
+     "LiveSubscriptionParams",
+ ]
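These re-exports define the public surface of surreal_sdk.streaming, so callers can pull either API style from the package root. For example (names taken from the __all__ list above):

    # Callback-style live queries and iterator-style change feeds share one namespace.
    from surreal_sdk.streaming import (
        ChangeFeedStream,   # polling-based CDC stream
        LiveQuery,          # callback-based live query
        LiveSelectStream,   # async-iterator live select
    )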
surreal_sdk/streaming/change_feed.py ADDED
@@ -0,0 +1,278 @@
+ """
+ Change Feed Streaming Implementation.
+
+ Provides CDC (Change Data Capture) streaming via SurrealDB Change Feeds.
+ This is stateless and ideal for microservices architectures.
+ """
+
+ from typing import Any, AsyncGenerator, AsyncIterator
+ from datetime import datetime
+ import asyncio
+
+ from ..connection.http import HTTPConnection
+ from ..exceptions import ChangeFeedError
+
+
+ class ChangeFeedStream:
+     """
+     Stream changes from a SurrealDB table using Change Feeds.
+
+     Change Feeds capture database modifications as a historic stream,
+     allowing replay from specific timestamps. This is ideal for:
+     - Microservices event streaming
+     - Data replication
+     - Audit trails
+     - Event sourcing
+
+     Usage:
+         async with HTTPConnection("http://localhost:8000", "ns", "db") as conn:
+             await conn.signin("root", "root")
+
+             stream = ChangeFeedStream(conn, "orders")
+
+             async for change in stream.stream():
+                 print(f"Change: {change['changes']}")
+     """
+
+     def __init__(
+         self,
+         connection: HTTPConnection,
+         table: str,
+         poll_interval: float = 0.1,
+         batch_size: int = 100,
+     ):
+         """
+         Initialize Change Feed stream.
+
+         Args:
+             connection: HTTP connection to use
+             table: Table to stream changes from
+             poll_interval: Seconds between polls when no changes
+             batch_size: Maximum changes per poll
+         """
+         self.connection = connection
+         self.table = table
+         self.poll_interval = poll_interval
+         self.batch_size = batch_size
+         self._cursor: str | None = None
+         self._running = False
+
+     @property
+     def cursor(self) -> str | None:
+         """Current stream cursor (versionstamp or timestamp)."""
+         return self._cursor
+
+     async def define_changefeed(self, retention: str = "7d") -> None:
+         """
+         Define a change feed on the table.
+
+         Must be called before streaming if not already defined.
+
+         Args:
+             retention: How long to keep changes (e.g., "1h", "7d", "30d")
+         """
+         try:
+             await self.connection.query(f"DEFINE TABLE {self.table} CHANGEFEED {retention}")
+         except Exception as e:
+             raise ChangeFeedError(f"Failed to define change feed: {e}")
+
+     async def get_changes(
+         self,
+         since: str | datetime | None = None,
+         limit: int | None = None,
+     ) -> list[dict[str, Any]]:
+         """
+         Get changes since a specific point.
+
+         Args:
+             since: Timestamp or versionstamp to start from
+             limit: Maximum number of changes to return
+
+         Returns:
+             List of change records
+         """
+         if since is None:
+             since = self._cursor or datetime.utcnow().isoformat() + "Z"
+         elif isinstance(since, datetime):
+             since = since.isoformat() + "Z"
+
+         limit = limit or self.batch_size
+
+         query = f"SHOW CHANGES FOR TABLE {self.table} SINCE '{since}' LIMIT {limit}"
+
+         try:
+             response = await self.connection.query(query)
+
+             if response.results:
+                 first_result = response.results[0]
+                 if first_result.is_ok:
+                     result_data = first_result.result
+                     if isinstance(result_data, list):
+                         return result_data
+                     elif isinstance(result_data, dict):
+                         return [result_data]
+
+             return []
+
+         except Exception as e:
+             raise ChangeFeedError(f"Failed to get changes: {e}")
+
+     async def stream(
+         self,
+         since: str | datetime | None = None,
+     ) -> AsyncIterator[dict[str, Any]]:
+         """
+         Stream changes continuously.
+
+         Args:
+             since: Starting point (timestamp or versionstamp)
+
+         Yields:
+             Change records as they become available
+         """
+         if since is None:
+             since = datetime.utcnow().isoformat() + "Z"
+         elif isinstance(since, datetime):
+             since = since.isoformat() + "Z"
+
+         self._cursor = since
+         self._running = True
+
+         while self._running:
+             try:
+                 changes = await self.get_changes(since=self._cursor)
+
+                 for change in changes:
+                     yield change
+
+                     # Update cursor with versionstamp if available
+                     versionstamp = change.get("versionstamp")
+                     if versionstamp:
+                         self._cursor = str(versionstamp)
+
+                 if not changes:
+                     await asyncio.sleep(self.poll_interval)
+
+             except asyncio.CancelledError:
+                 self._running = False
+                 raise
+             except Exception:
+                 # Log error but continue streaming
+                 await asyncio.sleep(self.poll_interval)
+
+     def stop(self) -> None:
+         """Stop the stream."""
+         self._running = False
+
+     async def stream_batch(
+         self,
+         since: str | datetime | None = None,
+     ) -> AsyncIterator[list[dict[str, Any]]]:
+         """
+         Stream changes in batches.
+
+         More efficient for high-volume scenarios.
+
+         Args:
+             since: Starting point
+
+         Yields:
+             Batches of change records
+         """
+         if since is None:
+             since = datetime.utcnow().isoformat() + "Z"
+         elif isinstance(since, datetime):
+             since = since.isoformat() + "Z"
+
+         self._cursor = since
+         self._running = True
+
+         while self._running:
+             try:
+                 changes = await self.get_changes(since=self._cursor)
+
+                 if changes:
+                     yield changes
+
+                     # Update cursor with last versionstamp
+                     last_change = changes[-1]
+                     versionstamp = last_change.get("versionstamp")
+                     if versionstamp:
+                         self._cursor = str(versionstamp)
+                 else:
+                     await asyncio.sleep(self.poll_interval)
+
+             except asyncio.CancelledError:
+                 self._running = False
+                 raise
+             except Exception:
+                 await asyncio.sleep(self.poll_interval)
+
+
+ class MultiTableChangeFeed:
+     """
+     Stream changes from multiple tables.
+
+     Useful for aggregating changes across related tables.
+     """
+
+     def __init__(
+         self,
+         connection: HTTPConnection,
+         tables: list[str],
+         poll_interval: float = 0.1,
+         batch_size: int = 100,
+     ):
+         """
+         Initialize multi-table change feed.
+
+         Args:
+             connection: HTTP connection to use
+             tables: List of tables to stream
+             poll_interval: Seconds between polls
+             batch_size: Maximum changes per table per poll
+         """
+         self.streams = {table: ChangeFeedStream(connection, table, poll_interval, batch_size) for table in tables}
+         self._running = False
+
+     async def stream(
+         self,
+         since: str | datetime | None = None,
+     ) -> AsyncIterator[tuple[str, dict[str, Any]]]:
+         """
+         Stream changes from all tables.
+
+         Yields:
+             Tuple of (table_name, change_record)
+         """
+         self._running = True
+
+         async def stream_table(table: str, stream: ChangeFeedStream) -> AsyncGenerator[tuple[str, dict[str, Any]], None]:
+             async for change in stream.stream(since):
+                 yield table, change
+
+         # Create tasks for all tables
+         async def merged_stream() -> None:
+             # This is a simplified implementation
+             # A production version would use asyncio.Queue
+             for table, stream in self.streams.items():
+                 _ = (table, stream)  # Placeholder for future implementation
+
+         # Simple round-robin implementation
+         while self._running:
+             for table, stream in self.streams.items():
+                 try:
+                     changes = await stream.get_changes()
+                     for change in changes:
+                         yield table, change
+                 except Exception:
+                     pass
+
+             if not any(stream._cursor for stream in self.streams.values()):
+                 await asyncio.sleep(self.streams[list(self.streams.keys())[0]].poll_interval)
+
+     def stop(self) -> None:
+         """Stop all streams."""
+         self._running = False
+         for stream in self.streams.values():
+             stream.stop()
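To illustrate how the cursor handling above is meant to be used, here is a hedged sketch of batch consumption with resume-from-cursor semantics, built only from the methods shown in this file. Connection setup mirrors the class docstring; the endpoint, credentials, and the "orders" table are placeholders.

    import asyncio
    from surreal_sdk.connection.http import HTTPConnection
    from surreal_sdk.streaming import ChangeFeedStream

    async def consume(last_cursor: str | None = None) -> None:
        # Placeholder endpoint/credentials, matching the docstring example above.
        async with HTTPConnection("http://localhost:8000", "ns", "db") as conn:
            await conn.signin("root", "root")

            stream = ChangeFeedStream(conn, "orders", poll_interval=0.5, batch_size=200)
            await stream.define_changefeed(retention="7d")  # issues DEFINE TABLE orders CHANGEFEED 7d

            # Resume from a persisted versionstamp, or start from "now" when None.
            async for batch in stream.stream_batch(since=last_cursor):
                print(f"received {len(batch)} change(s) up to cursor {stream.cursor}")
                # A real consumer would persist stream.cursor here so it can resume after a restart.

    asyncio.run(consume())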