wool 0.1rc7.tar.gz → 0.1rc8.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of wool might be problematic.

Files changed (31)
  1. {wool-0.1rc7 → wool-0.1rc8}/PKG-INFO +7 -2
  2. {wool-0.1rc7 → wool-0.1rc8}/pyproject.toml +19 -5
  3. wool-0.1rc8/wool/_mempool/__init__.py +4 -0
  4. wool-0.1rc8/wool/_mempool/_mempool.py +311 -0
  5. wool-0.1rc7/wool/_mempool/_metadata/__init__.py → wool-0.1rc8/wool/_mempool/_metadata.py +1 -3
  6. wool-0.1rc8/wool/_mempool/_service.py +225 -0
  7. wool-0.1rc8/wool/_protobuf/__init__.py +4 -0
  8. wool-0.1rc8/wool/_protobuf/mempool/mempool_pb2.py +66 -0
  9. wool-0.1rc8/wool/_protobuf/mempool/mempool_pb2.pyi +108 -0
  10. wool-0.1rc8/wool/_protobuf/mempool/mempool_pb2_grpc.py +312 -0
  11. wool-0.1rc7/wool/_protobuf/_mempool/_metadata/_metadata_pb2.py → wool-0.1rc8/wool/_protobuf/mempool/metadata/metadata_pb2.py +8 -8
  12. wool-0.1rc8/wool/_protobuf/mempool/metadata/metadata_pb2_grpc.py +24 -0
  13. wool-0.1rc7/wool/_mempool/__init__.py +0 -3
  14. wool-0.1rc7/wool/_mempool/_mempool.py +0 -204
  15. wool-0.1rc7/wool/_protobuf/.gitkeep +0 -0
  16. {wool-0.1rc7 → wool-0.1rc8}/.gitignore +0 -0
  17. {wool-0.1rc7 → wool-0.1rc8}/README.md +0 -0
  18. {wool-0.1rc7 → wool-0.1rc8}/wool/__init__.py +0 -0
  19. {wool-0.1rc7 → wool-0.1rc8}/wool/_cli.py +0 -0
  20. {wool-0.1rc7 → wool-0.1rc8}/wool/_event.py +0 -0
  21. {wool-0.1rc7 → wool-0.1rc8}/wool/_future.py +0 -0
  22. {wool-0.1rc7 → wool-0.1rc8}/wool/_logging.py +0 -0
  23. {wool-0.1rc7 → wool-0.1rc8}/wool/_manager.py +0 -0
  24. {wool-0.1rc7 → wool-0.1rc8}/wool/_pool.py +0 -0
  25. wool-0.1rc7/wool/_protobuf/_mempool/_metadata/_metadata_pb2.pyi → wool-0.1rc8/wool/_protobuf/mempool/metadata/metadata_pb2.pyi +0 -0
  26. {wool-0.1rc7 → wool-0.1rc8}/wool/_queue.py +0 -0
  27. {wool-0.1rc7 → wool-0.1rc8}/wool/_session.py +0 -0
  28. {wool-0.1rc7 → wool-0.1rc8}/wool/_task.py +0 -0
  29. {wool-0.1rc7 → wool-0.1rc8}/wool/_typing.py +0 -0
  30. {wool-0.1rc7 → wool-0.1rc8}/wool/_utils.py +0 -0
  31. {wool-0.1rc7 → wool-0.1rc8}/wool/_worker.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: wool
- Version: 0.1rc7
+ Version: 0.1rc8
  Summary: A Python framework for distributed multiprocessing.
  Author-email: Conrad Bzura <conrad@wool.io>
  Maintainer-email: maintainers@wool.io
@@ -206,19 +206,24 @@ License: Apache License
  See the License for the specific language governing permissions and
  limitations under the License.
  Classifier: Intended Audience :: Developers
+ Classifier: Operating System :: MacOS :: MacOS X
+ Classifier: Operating System :: POSIX :: Linux
  Requires-Python: >=3.10
  Requires-Dist: annotated-types
  Requires-Dist: click
  Requires-Dist: debugpy
+ Requires-Dist: grpcio
  Requires-Dist: protobuf
  Requires-Dist: shortuuid
  Requires-Dist: tblib
+ Requires-Dist: typing-extensions
  Provides-Extra: dev
  Requires-Dist: pytest; extra == 'dev'
  Requires-Dist: pytest-asyncio; extra == 'dev'
+ Requires-Dist: pytest-grpc-aio~=0.2.0; extra == 'dev'
  Requires-Dist: ruff; extra == 'dev'
  Provides-Extra: locking
- Requires-Dist: wool-locking==0.1rc7; extra == 'locking'
+ Requires-Dist: wool-locking==0.1rc8; extra == 'locking'
  Description-Content-Type: text/markdown

  # Wool
@@ -1,11 +1,25 @@
  [build-system]
+
  build-backend = "hatchling.build"
  requires = ["debugpy", "hatchling", "packaging", "GitPython", "toml"]

  [project]
  authors = [{ name = "Conrad Bzura", email = "conrad@wool.io" }]
- classifiers = ["Intended Audience :: Developers"]
- dependencies = ["annotated-types", "click", "debugpy", "protobuf", "shortuuid", "tblib"]
+ classifiers = [
+     "Intended Audience :: Developers",
+     "Operating System :: MacOS :: MacOS X",
+     "Operating System :: POSIX :: Linux",
+ ]
+ dependencies = [
+     "annotated-types",
+     "click",
+     "debugpy",
+     "grpcio",
+     "protobuf",
+     "shortuuid",
+     "tblib",
+     "typing-extensions",
+ ]
  description = "A Python framework for distributed multiprocessing."
  dynamic = ["version"]
  license = { file = "../LICENSE" }
@@ -15,7 +29,7 @@ readme = "README.md"
  requires-python = ">=3.10"

  [project.optional-dependencies]
- dev = ["pytest", "pytest-asyncio", "ruff"]
+ dev = ["pytest", "pytest-asyncio", "pytest-grpc-aio~=0.2.0", "ruff"]
  locking = ["wool-locking"]

  [project.scripts]
@@ -23,7 +37,7 @@ wool = "wool._cli:cli"

  [tool.hatch.build.hooks.protobuf]
  dependencies = ["hatch-protobuf"]
- generate_grpc = false
+ generate_grpc = true
  generate_pyi = true
  output_path = "src/wool/_protobuf"
  proto_paths = ["protobuf"]
@@ -55,7 +69,7 @@ select = ["E", "F", "I"]
  quote-style = "double"
  docstring-code-format = true

- [tool.ruff.lint.isort]
+ [tool.ruff.lint.isort]
  combine-as-imports = false
  force-single-line = true
  known-first-party = ["wool"]
@@ -0,0 +1,4 @@
+ from wool._mempool._mempool import MemoryPool
+ from wool._mempool._service import MemoryPoolService
+
+ __all__ = ["MemoryPool", "MemoryPoolService"]
@@ -0,0 +1,311 @@
+ from __future__ import annotations
+
+ import asyncio
+ import hashlib
+ import mmap
+ import os
+ import pathlib
+ import shutil
+ from contextlib import asynccontextmanager
+ from typing import BinaryIO
+
+ try:
+     from typing import Self
+ except ImportError:
+     from typing_extensions import Self
+
+ import shortuuid
+
+ from wool._mempool._metadata import MetadataMessage
+
+
+ class SharedObject:
+     _id: str
+     _mempool: MemoryPool
+     _file: BinaryIO
+     _mmap: mmap.mmap
+     _size: int
+     _md5: bytes
+
+     def __init__(self, id: str, *, mempool: MemoryPool):
+         self._id = id
+         self._mempool = mempool
+         self._file = open(self._path / "dump", "r+b")
+         self._mmap = mmap.mmap(self._file.fileno(), 0)
+         self._size = self.metadata.size
+         self._md5 = self.metadata.md5
+
+     def __del__(self):
+         try:
+             self.close()
+         except Exception:
+             pass
+
+     @property
+     def id(self) -> str:
+         return self._id
+
+     @property
+     def metadata(self) -> SharedObjectMetadata:
+         return SharedObjectMetadata(self.id, mempool=self._mempool)
+
+     @property
+     def mmap(self) -> mmap.mmap:
+         return self._mmap
+
+     @property
+     def _path(self) -> pathlib.Path:
+         return pathlib.Path(self._mempool.path, self.id)
+
+     def close(self):
+         self.metadata.close()
+         self._mmap.close()
+         self._file.close()
+
+     def refresh(self) -> Self:
+         if self._size != self.metadata.size or self._md5 != self.metadata.md5:
+             self._mmap.close()
+             self._file.close()
+             self._file = open(self._path / "dump", "r+b")
+             self._mmap = mmap.mmap(self._file.fileno(), 0)
+             self._size = self.metadata.size
+             self._md5 = self.metadata.md5
+         return self
+
+
+ class SharedObjectMetadata:
+     _id: str
+     _mempool: MemoryPool
+     _file: BinaryIO
+     _mmap: mmap.mmap
+     _instances: dict[str, SharedObjectMetadata] = {}
+
+     def __new__(cls, id: str, *, mempool: MemoryPool):
+         if id in cls._instances:
+             return cls._instances[id]
+         return super().__new__(cls)
+
+     def __init__(self, id: str, mempool: MemoryPool):
+         self._id = id
+         self._mempool = mempool
+         self._file = open(self._path / "meta", "r+b")
+         self._mmap = mmap.mmap(self._file.fileno(), 0)
+         self._instances[id] = self
+
+     def __del__(self):
+         try:
+             self.close()
+         except Exception:
+             pass
+
+     @property
+     def id(self) -> str:
+         return self._id
+
+     @property
+     def mutable(self) -> bool:
+         return self._metadata.mutable
+
+     @property
+     def size(self) -> int:
+         return self._metadata.size
+
+     @property
+     def md5(self) -> bytes:
+         return self._metadata.md5
+
+     @property
+     def mmap(self) -> mmap.mmap:
+         return self._mmap
+
+     @property
+     def _path(self) -> pathlib.Path:
+         return pathlib.Path(self._mempool.path, self.id)
+
+     @property
+     def _metadata(self) -> MetadataMessage:
+         return MetadataMessage.loads(bytes(self._mmap))
+
+     def close(self):
+         self._mmap.close()
+         self._file.close()
+         del self._instances[self.id]
+
+
+ class MemoryPool:
+     _objects: dict[str, SharedObject]
+     _path: pathlib.Path
+
+     def __init__(self, path: str | pathlib.Path = pathlib.Path(".mempool")):
+         if isinstance(path, str):
+             self._path = pathlib.Path(path)
+         else:
+             self._path = path
+         self._lockdir = self._path / "locks"
+         os.makedirs(self._lockdir, exist_ok=True)
+         self._acquire(f"pid-{os.getpid()}")
+         self._objects = dict()
+
+     def __contains__(self, ref: str) -> bool:
+         return ref in self._objects
+
+     def __del__(self):
+         self._release(f"pid-{os.getpid()}")
+
+     @property
+     def path(self) -> pathlib.Path:
+         return self._path
+
+     async def map(self, ref: str | None = None):
+         if ref is not None and ref not in self._objects:
+             if self._locked(f"delete-{ref}"):
+                 raise RuntimeError(
+                     f"Reference {ref} is currently locked for deletion"
+                 )
+             async with self._reference_lock(ref):
+                 self._map(ref)
+         else:
+             for entry in os.scandir(self._path):
+                 if entry.is_dir() and (ref := entry.name) != "locks":
+                     if not self._locked(f"delete-{ref}"):
+                         async with self._reference_lock(ref):
+                             self._map(ref)
+
+     async def put(
+         self, dump: bytes, *, mutable: bool = False, ref: str | None = None
+     ) -> str:
+         ref = ref or str(shortuuid.uuid())
+         async with self._reference_lock(ref):
+             self._put(ref, dump, mutable=mutable, exist_ok=False)
+         return ref
+
+     async def post(self, ref: str, dump: bytes) -> bool:
+         if self._locked(f"delete-{ref}"):
+             raise RuntimeError(
+                 f"Reference {ref} is currently locked for deletion"
+             )
+         async with self._reference_lock(ref):
+             if ref not in self._objects:
+                 self._map(ref)
+             obj = self._objects[ref]
+             if not obj.metadata.mutable:
+                 raise ValueError("Cannot modify an immutable reference")
+             if (size := len(dump)) != obj.metadata.size:
+                 try:
+                     obj.mmap.resize(size)
+                     self._post(ref, obj, dump)
+                 except SystemError:
+                     self._put(ref, dump, mutable=True, exist_ok=True)
+                 return True
+             elif hashlib.md5(dump).digest() != obj.metadata.md5:
+                 self._post(ref, obj, dump)
+                 return True
+             else:
+                 return False
+
+     async def get(self, ref: str) -> bytes:
+         if self._locked(f"delete-{ref}"):
+             raise RuntimeError(
+                 f"Reference {ref} is currently locked for deletion"
+             )
+         async with self._reference_lock(ref):
+             if ref not in self._objects:
+                 self._map(ref)
+             return bytes(self._objects[ref].refresh().mmap)
+
+     async def delete(self, ref: str):
+         async with self._delete_lock(ref):
+             async with self._reference_lock(ref):
+                 if ref not in self._objects:
+                     self._map(ref)
+                 self._objects.pop(ref).close()
+                 try:
+                     shutil.rmtree(self.path / ref)
+                 except FileNotFoundError:
+                     pass
+
+     def _put(
+         self,
+         ref: str,
+         dump: bytes,
+         *,
+         mutable: bool = False,
+         exist_ok: bool = False,
+     ):
+         metadata = MetadataMessage(
+             ref=ref,
+             mutable=mutable,
+             size=len(dump),
+             md5=hashlib.md5(dump).digest(),
+         )
+
+         refpath = pathlib.Path(self._path, f"{ref}")
+         os.makedirs(refpath, exist_ok=exist_ok)
+
+         with open(refpath / "meta", "wb") as metafile:
+             metafile.write(metadata.dumps())
+
+         with open(refpath / "dump", "wb") as dumpfile:
+             dumpfile.write(dump)
+
+         self._map(ref)
+
+     def _post(self, ref: str, obj: SharedObject, dump: bytes):
+         if not obj.metadata.mutable:
+             raise ValueError("Cannot modify an immutable reference")
+         metadata = MetadataMessage(
+             ref=ref,
+             mutable=True,
+             size=len(dump),
+             md5=hashlib.md5(dump).digest(),
+         )
+         obj.metadata.mmap[:] = metadata.dumps()
+         obj.metadata.mmap.flush()
+         obj.mmap.seek(0)
+         obj.mmap.write(dump)
+
+     def _map(self, ref: str):
+         obj = self._objects.pop(ref, None)
+         if obj:
+             obj.close()
+         self._objects[ref] = SharedObject(id=ref, mempool=self)
+
+     def _lockpath(self, key: str) -> pathlib.Path:
+         return pathlib.Path(self._lockdir, f"{key}.lock")
+
+     def _acquire(self, key: str) -> bool:
+         try:
+             os.symlink(f"{key}", self._lockpath(key))
+             return True
+         except FileExistsError:
+             return False
+
+     def _release(self, key: str):
+         try:
+             if os.path.islink(lock_path := self._lockpath(key)):
+                 os.unlink(lock_path)
+         except FileNotFoundError:
+             pass
+
+     def _locked(self, key: str) -> bool:
+         return os.path.islink(self._lockpath(key))
+
+     @asynccontextmanager
+     async def _reference_lock(self, ref: str):
+         try:
+             while not self._acquire(ref):
+                 await asyncio.sleep(0)
+             yield
+         finally:
+             self._release(ref)
+
+     @asynccontextmanager
+     async def _delete_lock(self, ref: str):
+         key = f"delete-{ref}"
+         if not self._acquire(f"delete-{ref}"):
+             raise RuntimeError(
+                 f"Reference {ref} is currently locked for deletion"
+             )
+         else:
+             yield
+         self._release(key)
@@ -4,9 +4,7 @@ import logging
  from dataclasses import dataclass

  try:
-     from wool._protobuf._mempool._metadata._metadata_pb2 import (
-         _MetadataMessage,
-     )
+     from wool._protobuf.mempool.metadata.metadata_pb2 import _MetadataMessage
  except ImportError:
      logging.error(
          "Failed to import _MetadataMessage. "
@@ -0,0 +1,225 @@
+ from __future__ import annotations
+
+ import asyncio
+ from typing import AsyncGenerator
+ from typing import Final
+ from weakref import WeakSet
+ from weakref import WeakValueDictionary
+
+ import shortuuid
+ from grpc.aio import ServicerContext
+
+ from wool._mempool import MemoryPool
+ from wool._protobuf.mempool import mempool_pb2 as proto
+ from wool._protobuf.mempool import mempool_pb2_grpc as rpc
+
+
+ class Session:
+     """
+     A session represents a client connection to the memory pool service and
+     serves as the scope for any shared references acquired over its duration.
+     """
+
+     id: Final[str]
+     queue: Final[asyncio.Queue[proto.SessionResponse]]
+     references: Final[set[Reference]]
+     sessions: Final[WeakValueDictionary[str, Session]] = WeakValueDictionary()
+
+     @classmethod
+     def get(cls, id: str) -> Session | None:
+         return cls.sessions.get(id)
+
+     def __init__(self):
+         self.id = shortuuid.uuid()
+         self.queue = asyncio.Queue()
+         self.references = set()
+         self.sessions[self.id] = self
+
+     def __eq__(self, other) -> bool:
+         if isinstance(other, Session):
+             return self.id == other.id
+         return False
+
+     def __hash__(self) -> int:
+         return hash(self.id)
+
+
+ class Reference:
+     id: Final[str]
+     mempool: Final[MemoryPool]
+     sessions: Final[WeakSet[Session]]
+
+     _references: Final[WeakValueDictionary[str, Reference]] = (
+         WeakValueDictionary()
+     )
+     _to_delete: Final[set[str]] = set()
+     _initialized: bool = False
+
+     @classmethod
+     def get(cls, id: str) -> Reference | None:
+         return cls._references.get(id)
+
+     @classmethod
+     def new(cls, id: str, *, mempool: MemoryPool) -> Reference:
+         if id in cls._references:
+             raise ValueError(f"Reference {id} already exists")
+         return cls(id, mempool=mempool)
+
+     def __new__(cls, id: str, *, mempool: MemoryPool):
+         if id in cls._references:
+             if id in cls._to_delete:
+                 cls._to_delete.remove(id)
+             return cls._references[id]
+         return super().__new__(cls)
+
+     def __init__(self, id: str, *, mempool: MemoryPool):
+         if not self._initialized:
+             self.id = id
+             self.mempool = mempool
+             self.sessions = WeakSet()
+             self._references[id] = self
+             self._initialized = True
+
+     def __eq__(self, other) -> bool:
+         if isinstance(other, Reference):
+             return self.id == other.id
+         return False
+
+     def __hash__(self) -> int:
+         return hash(self.id)
+
+     def __del__(self):
+         self._to_delete.add(self.id)
+
+         id = self.id
+         to_delete = self._to_delete
+         mempool = self.mempool
+
+         async def _delete():
+             if id in to_delete:
+                 try:
+                     to_delete.remove(id)
+                     await mempool.delete(id)
+                 except FileNotFoundError:
+                     pass
+
+         try:
+             asyncio.get_running_loop().create_task(_delete())
+         except RuntimeError:
+             asyncio.new_event_loop().run_until_complete(_delete())
+
+
+ class MemoryPoolService(rpc.MemoryPoolServicer):
+     def __init__(self, mempool: MemoryPool | None = None):
+         self._mempool = mempool or MemoryPool()
+         self._shutdown = asyncio.Event()
+
+     async def session(
+         self, request: proto.SessionRequest, context: ServicerContext
+     ) -> AsyncGenerator[proto.SessionResponse]:
+         session = Session()
+         yield proto.SessionResponse(session=proto.Session(id=session.id))
+         while True:
+             yield await session.queue.get()
+
+     async def acquire(
+         self, request: proto.AcquireRequest, context: ServicerContext
+     ) -> proto.AcquireResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         if not (reference := Reference.get(request.reference.id)):
+             raise ValueError(f"Reference {request.reference.id} not found")
+         session.references.add(reference)
+         reference.sessions.add(session)
+         return proto.AcquireResponse()
+
+     async def map(
+         self, request: proto.AcquireRequest, context: ServicerContext
+     ) -> proto.AcquireResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         await self._mempool.map(request.reference.id)
+         reference = Reference(request.reference.id, mempool=self._mempool)
+         await self.acquire(
+             proto.AcquireRequest(
+                 session=proto.Session(id=session.id),
+                 reference=proto.Reference(id=reference.id),
+             ),
+             context,
+         )
+         return proto.AcquireResponse()
+
+     async def put(
+         self, request: proto.PutRequest, context: ServicerContext
+     ) -> proto.PutResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         reference = Reference(
+             id=await self._mempool.put(request.dump, mutable=request.mutable),
+             mempool=self._mempool,
+         )
+         await self.acquire(
+             proto.AcquireRequest(
+                 session=proto.Session(id=session.id),
+                 reference=proto.Reference(id=reference.id),
+             ),
+             context,
+         )
+         return proto.PutResponse(reference=proto.Reference(id=reference.id))
+
+     async def get(
+         self, request: proto.GetRequest, context: ServicerContext
+     ) -> proto.GetResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         if not (reference := Reference.get(request.reference.id)):
+             raise ValueError(f"Reference {request.reference.id} not found")
+         if reference not in session.references:
+             await self.acquire(
+                 proto.AcquireRequest(
+                     session=proto.Session(id=session.id),
+                     reference=proto.Reference(id=reference.id),
+                 ),
+                 context,
+             )
+         dump = await self._mempool.get(reference.id)
+         return proto.GetResponse(dump=dump)
+
+     async def post(
+         self, request: proto.PostRequest, context: ServicerContext
+     ) -> proto.PostResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         if not (reference := Reference.get(request.reference.id)):
+             raise ValueError(f"Reference {request.reference.id} not found")
+         if reference not in session.references:
+             await self.acquire(
+                 proto.AcquireRequest(
+                     session=proto.Session(id=session.id),
+                     reference=proto.Reference(id=reference.id),
+                 ),
+                 context,
+             )
+         updated = await self._mempool.post(request.reference.id, request.dump)
+         if updated:
+             for session in Reference(
+                 id=request.reference.id, mempool=self._mempool
+             ).sessions:
+                 if session.id is not request.session.id:
+                     event = proto.Event(
+                         reference=request.reference,
+                         event_type="post",
+                     )
+                     await session.queue.put(proto.SessionResponse(event=event))
+         return proto.PostResponse(updated=updated)
+
+     async def release(
+         self, request: proto.ReleaseRequest, context: ServicerContext
+     ) -> proto.ReleaseResponse:
+         if not (session := Session.get(request.session.id)):
+             raise ValueError(f"Session {request.session.id} not found")
+         if not (reference := Reference.get(request.reference.id)):
+             raise ValueError(f"Reference {request.reference.id} not found")
+         session.references.remove(reference)
+         reference.sessions.remove(session)
+         return proto.ReleaseResponse()
@@ -0,0 +1,4 @@
+ import os
+ import sys
+
+ sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
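
For orientation, here is a minimal, hypothetical sketch of how the MemoryPool added in this release might be used, based on the code shown in the diff above. The directory name and payloads are illustrative values chosen for this example and are not part of the released files.

import asyncio

from wool._mempool import MemoryPool


async def main():
    # Back the pool with a local directory; ".mempool" is the default shown above.
    pool = MemoryPool(".mempool")

    # Store a mutable payload and receive a reference id for it.
    ref = await pool.put(b"hello", mutable=True)

    # Read the payload back through the memory-mapped dump file.
    data = await pool.get(ref)  # -> b"hello"

    # Update the mutable reference in place; returns True when the stored bytes change.
    changed = await pool.post(ref, b"hello, wool")

    # Remove the reference and its backing files.
    await pool.delete(ref)


asyncio.run(main())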