ommlds-0.0.0.dev499-py3-none-any.whl → ommlds-0.0.0.dev503-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. ommlds/.omlish-manifests.json +20 -9
  2. ommlds/__about__.py +1 -1
  3. ommlds/backends/anthropic/protocol/sse/events.py +2 -0
  4. ommlds/backends/groq/clients.py +9 -0
  5. ommlds/cli/_dataclasses.py +22 -72
  6. ommlds/cli/backends/inject.py +20 -0
  7. ommlds/cli/backends/meta.py +47 -0
  8. ommlds/cli/sessions/chat/drivers/ai/tools.py +3 -7
  9. ommlds/cli/sessions/chat/facades/commands/base.py +1 -1
  10. ommlds/cli/sessions/chat/interfaces/textual/app.py +1 -1
  11. ommlds/minichain/__init__.py +47 -6
  12. ommlds/minichain/_dataclasses.py +533 -132
  13. ommlds/minichain/backends/impls/anthropic/names.py +3 -3
  14. ommlds/minichain/backends/impls/anthropic/stream.py +1 -1
  15. ommlds/minichain/backends/impls/cerebras/names.py +15 -0
  16. ommlds/minichain/backends/impls/cerebras/stream.py +1 -1
  17. ommlds/minichain/backends/impls/google/names.py +6 -0
  18. ommlds/minichain/backends/impls/google/stream.py +1 -1
  19. ommlds/minichain/backends/impls/groq/chat.py +2 -0
  20. ommlds/minichain/backends/impls/groq/stream.py +3 -1
  21. ommlds/minichain/backends/impls/ollama/chat.py +1 -1
  22. ommlds/minichain/backends/impls/openai/format.py +2 -1
  23. ommlds/minichain/backends/impls/openai/stream.py +33 -1
  24. ommlds/minichain/chat/messages.py +1 -1
  25. ommlds/minichain/chat/stream/joining.py +36 -12
  26. ommlds/minichain/chat/transforms/metadata.py +3 -3
  27. ommlds/minichain/content/standard.py +1 -1
  28. ommlds/minichain/content/transform/json.py +1 -1
  29. ommlds/minichain/content/transform/metadata.py +1 -1
  30. ommlds/minichain/content/transform/standard.py +2 -2
  31. ommlds/minichain/content/transform/strings.py +1 -1
  32. ommlds/minichain/content/transform/templates.py +1 -1
  33. ommlds/minichain/metadata.py +13 -16
  34. ommlds/minichain/resources.py +22 -1
  35. ommlds/minichain/services/README.md +154 -0
  36. ommlds/minichain/services/__init__.py +6 -2
  37. ommlds/minichain/services/_marshal.py +46 -10
  38. ommlds/minichain/services/_origclasses.py +11 -0
  39. ommlds/minichain/services/_typedvalues.py +8 -3
  40. ommlds/minichain/services/requests.py +73 -3
  41. ommlds/minichain/services/responses.py +73 -3
  42. ommlds/minichain/services/services.py +9 -0
  43. ommlds/minichain/stream/services.py +24 -1
  44. ommlds/minichain/tools/reflect.py +3 -3
  45. ommlds/minichain/wrappers/firstinwins.py +29 -2
  46. ommlds/minichain/wrappers/instrument.py +146 -0
  47. ommlds/minichain/wrappers/retry.py +93 -3
  48. ommlds/minichain/wrappers/services.py +26 -0
  49. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/METADATA +6 -6
  50. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/RECORD +54 -52
  51. ommlds/minichain/stream/wrap.py +0 -62
  52. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/WHEEL +0 -0
  53. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/entry_points.txt +0 -0
  54. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/licenses/LICENSE +0 -0
  55. {ommlds-0.0.0.dev499.dist-info → ommlds-0.0.0.dev503.dist-info}/top_level.txt +0 -0
ommlds/minichain/backends/impls/anthropic/names.py
@@ -14,9 +14,9 @@ from ...strings.manifests import BackendStringsManifest
 MODEL_NAMES = ModelNameCollection(
     default='claude',
     aliases={
-        'claude-opus-4-1-20250805': None,
-        'claude-opus-4-1': 'claude-opus-4-1-20250805',
-        'claude-opus': 'claude-opus-4-1',
+        'claude-opus-4-5-20251101': None,
+        'claude-opus-4-5': 'claude-opus-4-5-20251101',
+        'claude-opus': 'claude-opus-4-5',
 
         'claude-sonnet-4-5-20250929': None,
         'claude-sonnet-4-5': 'claude-sonnet-4-5-20250929',
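For reference, `ModelNameCollection` aliases chain short names down to pinned model ids ('claude-opus' → 'claude-opus-4-5' → 'claude-opus-4-5-20251101'). A minimal sketch of that chained resolution; the `resolve` helper and `ALIASES` dict below are illustrative, not the real `ModelNameCollection` API:

# Illustration of chained alias resolution; `resolve` is hypothetical and not
# part of the real ModelNameCollection API.
ALIASES: dict[str, str | None] = {
    'claude-opus-4-5-20251101': None,                 # canonical pinned id
    'claude-opus-4-5': 'claude-opus-4-5-20251101',
    'claude-opus': 'claude-opus-4-5',
}


def resolve(name: str) -> str:
    # Follow alias links until an entry mapping to None (a canonical id) is hit.
    while (target := ALIASES.get(name)) is not None:
        name = target
    return name


assert resolve('claude-opus') == 'claude-opus-4-5-20251101'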
ommlds/minichain/backends/impls/anthropic/stream.py
@@ -105,7 +105,7 @@ class AnthropicChatChoicesStreamService:
                 for l in db.feed(b):
                     if isinstance(l, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(l)
 
                     # FIXME: https://docs.anthropic.com/en/docs/build-with-claude/streaming
                     for so in sd.process_line(l):
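The `return []` → `raise TypeError(l)` change recurs in the Cerebras, Google, Groq, Ollama, and OpenAI stream services below: a chunk left incomplete by the delimiting buffer now surfaces as an error instead of silently ending the stream. A self-contained sketch of the pattern; `SimpleLineBuffer` is a stand-in for omlish's `DelimitingBuffer`, not its real API:

# Stand-in for omlish.io.buffers.DelimitingBuffer, for illustration only.
class SimpleLineBuffer:
    class Incomplete(bytes):
        """Trailing bytes left over when the feed ends mid-line."""

    def __init__(self) -> None:
        self._buf = b''

    def feed(self, b: bytes) -> list[bytes]:
        if not b:  # end of stream: anything still buffered is an incomplete line
            return [SimpleLineBuffer.Incomplete(self._buf)] if self._buf else []
        self._buf += b
        *lines, self._buf = self._buf.split(b'\n')
        return lines


buf = SimpleLineBuffer()
out: list[bytes] = []
try:
    for chunk in (b'data: {"a"', b': 1}\n', b'data: trunc', b''):
        for line in buf.feed(chunk):
            if isinstance(line, SimpleLineBuffer.Incomplete):
                # previously: return [] (silently ending the stream); now: raise
                raise TypeError(line)
            out.append(line)
except TypeError as e:
    print('truncated stream detected:', e)

print(out)  # [b'data: {"a": 1}']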
ommlds/minichain/backends/impls/cerebras/names.py
@@ -12,9 +12,24 @@ MODEL_NAMES = ModelNameCollection(
     default='gpt-oss-120b',
     aliases={
         'llama3.1-8b': None,
+
         'llama-3.3-70b': None,
+        'llama3': 'llama-3.3-70b',
+
         'gpt-oss-120b': None,
+        'gpt-oss': 'gpt-oss-120b',
+
         'qwen-3-32b': None,
+        'qwen3': 'qwen-3-32b',
+
+        ##
+        # preview
+
+        'qwen-3-235b-a22b-instruct-2507': None,
+        'qwen-3-235b': 'qwen-3-235b-a22b-instruct-2507',
+
+        'zai-glm-4.7': None,
+        'glm': 'zai-glm-4.7',
     },
 )
 
ommlds/minichain/backends/impls/cerebras/stream.py
@@ -88,7 +88,7 @@ class CerebrasChatChoicesStreamService:
                 for l in db.feed(b):
                     if isinstance(l, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(l)
 
                     # FIXME: https://platform.openai.com/docs/guides/function-calling?api-mode=responses#streaming
                     for so in sd.process_line(l):
ommlds/minichain/backends/impls/google/names.py
@@ -16,6 +16,12 @@ from ...strings.manifests import BackendStringsManifest
 MODEL_NAMES = ModelNameCollection(
     default='gemini',
     aliases={
+        'gemini-3-pro-preview': None,
+        'gemini-3-pro': 'gemini-3-pro-preview',
+
+        'gemini-3-flash-preview': None,
+        'gemini-3-flash': 'gemini-3-flash-preview',
+
         'gemini-2.5-pro': None,
         'gemini-2.5-flash': None,
         'gemini-2.5-flash-lite': None,
ommlds/minichain/backends/impls/google/stream.py
@@ -179,7 +179,7 @@ class GoogleChatChoicesStreamService:
                 for bl in db.feed(b):
                     if isinstance(bl, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(bl)
 
                     l = bl.decode('utf-8')
                     if not l:
ommlds/minichain/backends/impls/groq/chat.py
@@ -7,6 +7,7 @@ from omlish.formats import json
 from omlish.http import all as http
 
 from .....backends.groq import protocol as pt
+from .....backends.groq.clients import REQUIRED_HTTP_HEADERS
 from ....chat.choices.services import ChatChoicesRequest
 from ....chat.choices.services import ChatChoicesResponse
 from ....chat.choices.services import static_check_is_chat_choices_service
@@ -65,6 +66,7 @@ class GroqChatChoicesService:
             headers={
                 http.consts.HEADER_CONTENT_TYPE: http.consts.CONTENT_TYPE_JSON,
                 http.consts.HEADER_AUTH: http.consts.format_bearer_auth_header(check.not_none(self._api_key).reveal()),
+                **REQUIRED_HTTP_HEADERS,
             },
             data=json.dumps(raw_request).encode('utf-8'),
             client=self._http_client,
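The Groq chat and stream services now merge `REQUIRED_HTTP_HEADERS` (defined in `ommlds/backends/groq/clients.py`, whose contents are not shown in this diff) into each request's headers. A minimal sketch of the merge pattern, with a made-up placeholder constant:

# REQUIRED_HTTP_HEADERS lives in ommlds/backends/groq/clients.py; its actual
# contents are not shown in this diff, so this constant is a placeholder.
REQUIRED_HTTP_HEADERS: dict[str, str] = {
    'x-example-required-header': 'example-value',
}


def build_headers(api_key: str) -> dict[str, str]:
    # Per-request headers with the shared required headers merged in last, so
    # they are always present and win on any key collision.
    return {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {api_key}',
        **REQUIRED_HTTP_HEADERS,
    }


print(build_headers('sk-example'))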
ommlds/minichain/backends/impls/groq/stream.py
@@ -9,6 +9,7 @@ from omlish.http import sse
 from omlish.io.buffers import DelimitingBuffer
 
 from .....backends.groq import protocol as pt
+from .....backends.groq.clients import REQUIRED_HTTP_HEADERS
 from ....chat.choices.services import ChatChoicesOutputs
 from ....chat.choices.stream.services import ChatChoicesStreamRequest
 from ....chat.choices.stream.services import ChatChoicesStreamResponse
@@ -72,6 +73,7 @@ class GroqChatChoicesStreamService:
             headers={
                 http.consts.HEADER_CONTENT_TYPE: http.consts.CONTENT_TYPE_JSON,
                 http.consts.HEADER_AUTH: http.consts.format_bearer_auth_header(check.not_none(self._api_key).reveal()),
+                **REQUIRED_HTTP_HEADERS,
             },
             data=json.dumps(raw_request).encode('utf-8'),
         )
@@ -88,7 +90,7 @@ class GroqChatChoicesStreamService:
                 for l in db.feed(b):
                     if isinstance(l, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(l)
 
                     # FIXME: https://platform.openai.com/docs/guides/function-calling?api-mode=responses#streaming
                     for so in sd.process_line(l):
ommlds/minichain/backends/impls/ollama/chat.py
@@ -179,7 +179,7 @@ class OllamaChatChoicesStreamService(BaseOllamaChatChoicesService):
                 for l in db.feed(b):
                     if isinstance(l, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(l)
 
                     lj = json.loads(l.decode('utf-8'))
                     lp: pt.ChatResponse = msh.unmarshal(lj, pt.ChatResponse)
ommlds/minichain/backends/impls/openai/format.py
@@ -145,7 +145,8 @@ def build_mc_ai_delta(delta: pt.ChatCompletionChunkChoiceDelta) -> AiDelta:
         )
 
     else:
-        raise ValueError(delta)
+        # FIXME: no
+        return ContentAiDelta('')
 
 
 ##
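`build_mc_ai_delta` now degrades unrecognized chunk deltas to an empty `ContentAiDelta('')` (flagged `FIXME`) instead of raising, so a stream survives delta shapes the mapper does not yet handle. A rough sketch of that dispatch-with-fallback shape, using simplified stand-in types rather than the real protocol classes:

import dataclasses as dc


# Simplified stand-ins for the real delta types; illustration only.
@dc.dataclass(frozen=True)
class ContentAiDelta:
    s: str


@dc.dataclass(frozen=True)
class ToolUseAiDelta:
    name: str
    raw_args: str


def build_delta(raw: dict) -> ContentAiDelta | ToolUseAiDelta:
    if (c := raw.get('content')) is not None:
        return ContentAiDelta(c)
    if (tc := raw.get('tool_calls')):
        fn = tc[0]['function']
        return ToolUseAiDelta(fn.get('name', ''), fn.get('arguments', ''))
    # Unknown delta shape: previously this raised ValueError; it now degrades to
    # an empty content delta so the stream keeps flowing.
    return ContentAiDelta('')


assert build_delta({'role': 'assistant'}) == ContentAiDelta('')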
ommlds/minichain/backends/impls/openai/stream.py
@@ -4,6 +4,7 @@ https://platform.openai.com/docs/api-reference/responses-streaming
 import typing as ta
 
 from omlish import check
+from omlish import dataclasses as dc
 from omlish import marshal as msh
 from omlish import typedvalues as tv
 from omlish.formats import json
@@ -35,6 +36,12 @@ from .names import CHAT_MODEL_NAMES
 ##
 
 
+@dc.dataclass()
+class OpenaiChatChoicesStreamServiceError(Exception):
+    status: int
+    data: ta.Any | None = None
+
+
 # @omlish-manifest $.minichain.registries.manifests.RegistryManifest(
 #     name='openai',
 #     type='ChatChoicesStreamService',
@@ -90,18 +97,43 @@ class OpenaiChatChoicesStreamService:
         http_client = await rs.enter_async_context(http.manage_async_client(self._http_client))
         http_response = await rs.enter_async_context(await http_client.stream_request(http_request))
 
+        if http_response.status != 200:
+            data: ta.Any
+            try:
+                data = await http_response.stream.readall()
+            except Exception as e:  # noqa
+                data = e
+            try:
+                data_obj = json.loads(data.decode())
+            except Exception as e:  # noqa
+                pass
+            else:
+                data = data_obj
+            raise OpenaiChatChoicesStreamServiceError(http_response.status, data)
+
         async def inner(sink: StreamResponseSink[AiChoicesDeltas]) -> ta.Sequence[ChatChoicesOutputs]:
             db = DelimitingBuffer([b'\r', b'\n', b'\r\n'])
             sd = sse.SseDecoder()
+
+            # bs = []
+            # ls = []
+            # sos = []
+
             while True:
                 b = await http_response.stream.read1(self.READ_CHUNK_SIZE)
+                # bs.append(b)
+
                 for l in db.feed(b):
+                    # ls.append(l)
+
                     if isinstance(l, DelimitingBuffer.Incomplete):
                         # FIXME: handle
-                        return []
+                        raise TypeError(l)
 
                     # FIXME: https://platform.openai.com/docs/guides/function-calling?api-mode=responses#streaming
                     for so in sd.process_line(l):
+                        # sos.append(so)
+
                         if isinstance(so, sse.SseEvent) and so.type == b'message':
                             ss = so.data.decode('utf-8')
                             if ss == '[DONE]':
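Non-2xx responses from the streaming endpoint now raise `OpenaiChatChoicesStreamServiceError`, carrying the HTTP status and the response body (JSON-decoded when possible). A hedged sketch of how a caller might report it; only the exception's shape comes from this diff, the plain mirror class and `describe` helper are illustrative:

import typing as ta


class OpenaiChatChoicesStreamServiceError(Exception):
    """Plain mirror of the dataclass added above, for a runnable illustration."""

    def __init__(self, status: int, data: ta.Any | None = None) -> None:
        super().__init__(status, data)
        self.status = status
        self.data = data


def describe(e: OpenaiChatChoicesStreamServiceError) -> str:
    # `data` may be a decoded JSON object, raw bytes, or the exception raised
    # while reading the body.
    detail = e.data.get('error', e.data) if isinstance(e.data, dict) else e.data
    return f'openai stream request failed: HTTP {e.status}: {detail!r}'


print(describe(OpenaiChatChoicesStreamServiceError(429, {'error': 'rate limited'})))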
ommlds/minichain/chat/messages.py
@@ -41,7 +41,7 @@ class Message( # noqa
     def replace(self, **kwargs: ta.Any) -> ta.Self:
         if (n := dc.replace_is_not(self, **kwargs)) is self:
             return self
-        return n.update_metadata(MessageOriginal(self), discard=[MessageOriginal], override=True)
+        return n.with_metadata(MessageOriginal(self), discard=[MessageOriginal], override=True)
 
 
 Chat: ta.TypeAlias = ta.Sequence[Message]
ommlds/minichain/chat/stream/joining.py
@@ -22,8 +22,9 @@ class AiDeltaJoiner:
     def __init__(self) -> None:
         super().__init__()
 
-        self._deltas: list[AiDelta] = []
-        self._messages: list[AnyAiMessage] = []
+        self._all: list[AiDelta] = []
+        self._queue: list[AiDelta] = []
+        self._out: list[AnyAiMessage] = []
 
     def _build_joined(self, deltas: ta.Sequence[AiDelta]) -> AnyAiMessage:
         dty = check.single(set(map(type, check.not_empty(deltas))))
@@ -43,6 +44,9 @@
 
             ra = ''.join(filter(None, (td.raw_args for td in tds)))
 
+            if not ra:
+                ra = '{}'
+
             return ToolUseMessage(ToolUse(
                 id=tds[0].id,
                 name=check.non_empty_str(tds[0].name),
@@ -53,19 +57,39 @@
         else:
             raise TypeError(dty)
 
-    def _maybe_join(self) -> None:
-        if not self._deltas:
+    def _join(self) -> None:
+        if not self._queue:
             return
 
-        self._messages.append(self._build_joined(self._deltas))
-        self._deltas.clear()
+        self._out.append(self._build_joined(self._queue))
+        self._queue.clear()
+
+    def _should_join(self, *, new: AiDelta | None = None) -> bool:
+        if not self._queue:
+            return False
+
+        if new is not None and type(self._queue[0]) is not type(new):
+            return True
+
+        if (
+                isinstance(d0 := self._queue[0], PartialToolUseAiDelta) and
+                isinstance(new, PartialToolUseAiDelta) and
+                d0.id is not None and
+                new.id is not None and
+                d0.id != new.id
+        ):
+            return True
+
+        return False
 
     def _add_one(self, d: AiDelta) -> None:
-        if self._deltas and type(self._deltas[0]) is not type(d):
-            self._maybe_join()
+        if self._should_join(new=d):
+            self._join()
+
+        self._all.append(d)
 
         if isinstance(d, ToolUseAiDelta):
-            self._messages.append(ToolUseMessage(ToolUse(
+            self._out.append(ToolUseMessage(ToolUse(
                 id=d.id,
                 name=check.not_none(d.name),
                 args=d.args or {},
@@ -73,13 +97,13 @@
             )))
 
         else:
-            self._deltas.append(d)
+            self._queue.append(d)
 
     def add(self, deltas: AiDeltas) -> None:
         for d in deltas:
             self._add_one(d)
 
     def build(self) -> AiChat:
-        self._maybe_join()
+        self._join()
 
-        return list(self._messages)
+        return list(self._out)
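`AiDeltaJoiner` now tracks every delta seen (`_all`), the pending group (`_queue`), and the joined output (`_out`), and it starts a new group when the delta type changes or when two partial tool-use deltas carry different ids. A simplified, self-contained sketch of that grouping rule, using plain tuples in place of the real delta classes:

import typing as ta

# (kind, tool_use_id, payload) tuples stand in for the real AiDelta classes.
Delta: ta.TypeAlias = tuple[str, ta.Optional[str], str]


def group_deltas(deltas: ta.Iterable[Delta]) -> list[list[Delta]]:
    out: list[list[Delta]] = []
    queue: list[Delta] = []

    def should_start_new(new: Delta) -> bool:
        if not queue:
            return False
        head = queue[0]
        if head[0] != new[0]:  # delta kind changed
            return True
        # two partial tool-use deltas with different ids belong to different calls
        return head[0] == 'tool' and None not in (head[1], new[1]) and head[1] != new[1]

    for d in deltas:
        if should_start_new(d):
            out.append(queue)
            queue = []
        queue.append(d)
    if queue:
        out.append(queue)
    return out


groups = group_deltas([
    ('content', None, 'Hel'), ('content', None, 'lo'),
    ('tool', 'a', '{"x"'), ('tool', 'a', ': 1}'),
    ('tool', 'b', '{}'),
])
assert [len(g) for g in groups] == [2, 2, 1]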
ommlds/minichain/chat/transforms/metadata.py
@@ -26,7 +26,7 @@ class UuidAddingMessageTransform(MessageTransform):
 
     def transform_message(self, m: Message) -> Chat:
         if Uuid not in m.metadata:
-            m = m.update_metadata(Uuid(self.uuid_factory()))
+            m = m.with_metadata(Uuid(self.uuid_factory()))
         return [m]
 
 
@@ -36,7 +36,7 @@ class CreatedAtAddingMessageTransform(MessageTransform):
 
     def transform_message(self, m: Message) -> Chat:
         if CreatedAt not in m.metadata:
-            m = m.update_metadata(CreatedAt(self.clock()))
+            m = m.with_metadata(CreatedAt(self.clock()))
         return [m]
 
 
@@ -54,6 +54,6 @@ class OriginAddingMessageTransform(MessageTransform):
 
     def transform_message(self, m: Message) -> Chat:
         return [
-            o.update_metadata(TransformedMessageOrigin(m)) if TransformedMessageOrigin not in o.metadata else m
+            o.with_metadata(TransformedMessageOrigin(m)) if TransformedMessageOrigin not in o.metadata else m
            for o in self.child.transform_message(m)
         ]
ommlds/minichain/content/standard.py
@@ -29,4 +29,4 @@ class StandardContent( # noqa
     def replace(self, **kwargs: ta.Any) -> ta.Self:
         if (n := dc.replace_is_not(self, **kwargs)) is self:
             return self
-        return n.update_metadata(ContentOriginal(self), discard=[ContentOriginal], override=True)
+        return n.with_metadata(ContentOriginal(self), discard=[ContentOriginal], override=True)

ommlds/minichain/content/transform/json.py
@@ -52,4 +52,4 @@ class JsonContentRenderer(ContentTransform[None]):
             case _:
                 raise ValueError(self._code)
 
-        return nc.update_metadata(ContentOriginal(c))
+        return nc.with_metadata(ContentOriginal(c))

ommlds/minichain/content/transform/metadata.py
@@ -9,7 +9,7 @@ from ..visitors import ContentTransform
 
 class OriginalMetadataStrippingContentTransform(ContentTransform[None]):
     def visit_standard_content(self, c: StandardContent, ctx: None) -> StandardContent:
-        return c.discard_metadata(ContentOriginal)
+        return c.with_metadata(discard=[ContentOriginal])
 
 
 def strip_content_original_metadata(c: Content) -> Content:

ommlds/minichain/content/transform/standard.py
@@ -26,7 +26,7 @@ class LiftToStandardContentTransform(ContentTransform[None]):
         self._sequence_mode = sequence_mode
 
     def visit_str(self, s: str, ctx: None) -> Content:
-        return TextContent(s).update_metadata(ContentOriginal(s))
+        return TextContent(s).with_metadata(ContentOriginal(s))
 
     def visit_sequence(self, c: ta.Sequence[Content], ctx: None) -> Content:
         cc = check.isinstance(super().visit_sequence(c, ctx), collections.abc.Sequence)
@@ -40,4 +40,4 @@ class LiftToStandardContentTransform(ContentTransform[None]):
             case _:
                 raise ValueError(self._sequence_mode)
 
-        return nc.update_metadata(ContentOriginal(c))
+        return nc.with_metadata(ContentOriginal(c))

ommlds/minichain/content/transform/strings.py
@@ -20,7 +20,7 @@ class StringFnContentTransform(ContentTransform[None]):
     fn: ta.Callable[[str], str]
 
     def visit_str(self, c: str, ctx: None) -> TextContent:
-        return TextContent(self.fn(c)).update_metadata(ContentOriginal(c))
+        return TextContent(self.fn(c)).with_metadata(ContentOriginal(c))
 
     def visit_text_content(self, c: TextContent, ctx: None) -> TextContent:
         return c.replace(s=self.fn(c.s))

ommlds/minichain/content/transform/templates.py
@@ -22,4 +22,4 @@ class TemplateContentMaterializer(ContentTransform[None]):
 
     def visit_template_content(self, c: TemplateContent, ctx: None) -> Content:
         s = c.t.render(check.not_none(self._templater_context))
-        return TextContent(s).update_metadata(ContentOriginal(c))
+        return TextContent(s).with_metadata(ContentOriginal(c))
ommlds/minichain/metadata.py
@@ -25,9 +25,9 @@ MetadataT = ta.TypeVar('MetadataT', bound=Metadata)
 
 
 class MetadataContainer(
-    tv.TypedValueGeneric[MetadataT],
     lang.Abstract,
     lang.PackageSealed,
+    ta.Generic[MetadataT],
 ):
     @property
     @abc.abstractmethod
@@ -35,7 +35,12 @@ class MetadataContainer(
         raise NotImplementedError
 
     @abc.abstractmethod
-    def update_metadata(self, *mds: MetadataT, override: bool = False) -> ta.Self:
+    def with_metadata(
+            self,
+            *add: MetadataT,
+            discard: ta.Iterable[type] | None = None,
+            override: bool = False,
+    ) -> ta.Self:
         raise NotImplementedError
 
 
@@ -66,30 +71,22 @@ class MetadataContainerDataclass( # noqa
     def metadata(self) -> tv.TypedValues[MetadataT]:
         return check.isinstance(getattr(self, '_metadata'), tv.TypedValues)
 
-    def discard_metadata(self, *tys: type) -> ta.Self:
-        nmd = (md := self.metadata).discard(*tys)
-
-        if nmd is md:
-            return self
-
-        return dc.replace(self, _metadata=nmd) # type: ignore[call-arg] # noqa
-
-    def update_metadata(
+    def with_metadata(
             self,
-            *mds: MetadataT,
+            *add: MetadataT,
             discard: ta.Iterable[type] | None = None,
             override: bool = False,
    ) -> ta.Self:
-        nmd = (md := self.metadata).update(
-            *mds,
+        new = (old := self.metadata).update(
+            *add,
            discard=discard,
            override=override,
        )
 
-        if nmd is md:
+        if new is old:
            return self
 
-        return dc.replace(self, _metadata=nmd) # type: ignore[call-arg] # noqa
+        return dc.replace(self, _metadata=new) # type: ignore[call-arg] # noqa
 
 
 ##
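`discard_metadata` is gone and `update_metadata` is now `with_metadata`, which covers add, discard, and override in one call (callers across the package were renamed accordingly, as the hunks above show). A self-contained sketch of those semantics over a toy container; the real `tv.TypedValues.update` behavior may differ in detail:

import typing as ta


class Meta:
    """Toy stand-in for a typed-value metadata container, keyed by value type."""

    def __init__(self, *vals: ta.Any) -> None:
        self._by_type: dict[type, ta.Any] = {type(v): v for v in vals}

    def __contains__(self, ty: type) -> bool:
        return ty in self._by_type

    def with_metadata(
            self,
            *add: ta.Any,
            discard: ta.Iterable[type] | None = None,
            override: bool = False,
    ) -> 'Meta':
        new = dict(self._by_type)
        for ty in (discard or ()):
            new.pop(ty, None)
        for v in add:
            if override or type(v) not in new:
                new[type(v)] = v
        return Meta(*new.values())


class Uuid(str):  # hypothetical metadata type, for the example only
    pass


m = Meta().with_metadata(Uuid('123'))                            # add if absent
m = m.with_metadata(Uuid('456'), discard=[Uuid], override=True)  # replace
assert Uuid in m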
ommlds/minichain/resources.py
@@ -33,6 +33,10 @@ class ResourcesRefNotRegisteredError(Exception):
 
 @ta.final
 class Resources(lang.Final, lang.NotPicklable):
+    """
+    Essentially a reference-tracked AsyncContextManager.
+    """
+
     def __init__(
             self,
             *,
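As the new docstring says, `Resources` is essentially a reference-tracked async context manager: refs are added and removed explicitly, and the underlying exit stack is closed once the last ref is released (unless auto-close is disabled). A rough, self-contained mimic of that idea, not the real class:

import contextlib


class RefCountedResources:
    """Illustrative mimic of a reference-tracked async context manager."""

    def __init__(self) -> None:
        self._refs: set[object] = set()
        self._aes = contextlib.AsyncExitStack()
        self._closed = False

    def add_ref(self, ref: object) -> None:
        if self._closed:
            raise RuntimeError('already closed')
        self._refs.add(ref)

    async def remove_ref(self, ref: object) -> None:
        self._refs.remove(ref)
        if not self._refs:  # last ref released -> tear down the exit stack
            self._closed = True
            await self._aes.aclose()

    async def enter_async_context(self, cm: contextlib.AbstractAsyncContextManager):
        if self._closed:
            raise RuntimeError('already closed')
        return await self._aes.enter_async_context(cm)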
@@ -80,10 +84,14 @@ class Resources(lang.Final, lang.NotPicklable):
         @contextlib.asynccontextmanager
         async def inner():
             init_ref = Resources._InitRef()
+
             res = Resources(init_ref=init_ref, **kwargs)
+
             await res.init()
+
             try:
                 yield res
+
             finally:
                 await res.remove_ref(init_ref)
 
@@ -94,6 +102,7 @@
     def add_ref(self, ref: ResourcesRef) -> None:
         check.isinstance(ref, ResourcesRef)
         check.state(not self._closed)
+
         self._refs.add(ref)
 
     def has_ref(self, ref: ResourcesRef) -> bool:
@@ -101,10 +110,13 @@
 
     async def remove_ref(self, ref: ResourcesRef) -> None:
         check.isinstance(ref, ResourcesRef)
+
         try:
             self._refs.remove(ref)
+
         except KeyError:
             raise ResourcesRefNotRegisteredError(ref) from None
+
         if not self._no_autoclose and not self._refs:
             await self.aclose()
 
@@ -112,10 +124,12 @@
 
     def enter_context(self, cm: ta.ContextManager[T]) -> T:
         check.state(not self._closed)
+
         return self._aes.enter_context(cm)
 
     async def enter_async_context(self, cm: ta.AsyncContextManager[T]) -> T:
         check.state(not self._closed)
+
         return await self._aes.enter_async_context(cm)
 
     #
@@ -150,7 +164,11 @@
 class ResourceManaged(ResourcesRef, lang.Final, lang.NotPicklable, ta.Generic[T]):
     """
     A class to 'handoff' a ref to a `Resources`, allowing the `Resources` to temporarily survive being passed from
-    instantiation within a callee to being `__aenter__`'d in the caller.
+    instantiation within a callee.
+
+    This class wraps an arbitrary value, likely an object referencing resources managed by the `Resources`, which is
+    accessed by `__aenter__`'ing. However, as the point of this class is handoff of a `Resources`, not necessarily some
+    arbitrary value, the value needn't necessarily be related to the `Resources`, or may even be `None`.
 
     The ref to the `Resources` is allocated in the ctor, so the contract is that an instance of this must be immediately
     `__aenter__`'d before doing anything else with the return value of the call. Failure to do so leaks the `Resources`.
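A tiny mimic of the handoff contract the docstring describes: the wrapper takes a ref on the `Resources` at construction, hands back the wrapped value on `__aenter__`, and releases the ref on `__aexit__`. Names and constructor argument order are illustrative, not the real `ResourceManaged` signature:

class ManagedHandoff:
    """Toy mimic of the handoff contract (not the real ResourceManaged class)."""

    def __init__(self, value, resources) -> None:
        self._value = value
        self._resources = resources
        self._resources.add_ref(self)  # ref taken in the ctor: enter me promptly

    async def __aenter__(self):
        return self._value  # wrapped value; may be unrelated to the resources, or None

    async def __aexit__(self, *exc) -> None:
        await self._resources.remove_ref(self)  # releasing may auto-close them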
@@ -172,11 +190,13 @@ class ResourceManaged(ResourcesRef, lang.Final, lang.NotPicklable, ta.Generic[T]
     async def __aenter__(self) -> T:
         check.state(self.__state == 'new')
         self.__state = 'entered'
+
         return self.__v
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
         check.state(self.__state == 'entered')
         self.__state = 'exited'
+
         await self.__resources.remove_ref(self)
 
     def __del__(self) -> None:
@@ -203,6 +223,7 @@ class UseResources(tv.UniqueScalarTypedValue[Resources], ResourcesOption, lang.F
     if (ur := tv.as_collection(options).get(UseResources)) is not None:
         async with ResourceManaged(ur.v, ur.v) as rs:
             yield rs
+
     else:
         async with Resources.new() as rs:
             yield rs
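A hedged sketch of the two paths in the final hunk: reuse a caller-provided resources object when one is given, otherwise create a fresh one and let it close on exit. `_StubResources` below stands in for the real `Resources` class:

import asyncio
import contextlib


class _StubResources:
    """Stand-in for the real Resources class, just enough to show both paths."""

    def __init__(self, name: str) -> None:
        self.name = name

    @classmethod
    @contextlib.asynccontextmanager
    async def new(cls):
        res = cls('fresh')
        try:
            yield res
        finally:
            print(f'closing {res.name}')


@contextlib.asynccontextmanager
async def provided_or_new(provided: '_StubResources | None'):
    # Mirrors the branch above: reuse a caller-provided resources object if
    # given, otherwise create a fresh one and close it on exit.
    if provided is not None:
        yield provided
    else:
        async with _StubResources.new() as rs:
            yield rs


async def main() -> None:
    async with provided_or_new(None) as rs:
        print('using', rs.name)


asyncio.run(main())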