weakincentives-0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. weakincentives/__init__.py +67 -0
  2. weakincentives/adapters/__init__.py +37 -0
  3. weakincentives/adapters/_names.py +32 -0
  4. weakincentives/adapters/_provider_protocols.py +69 -0
  5. weakincentives/adapters/_tool_messages.py +80 -0
  6. weakincentives/adapters/core.py +102 -0
  7. weakincentives/adapters/litellm.py +254 -0
  8. weakincentives/adapters/openai.py +254 -0
  9. weakincentives/adapters/shared.py +1021 -0
  10. weakincentives/cli/__init__.py +23 -0
  11. weakincentives/cli/wink.py +58 -0
  12. weakincentives/dbc/__init__.py +412 -0
  13. weakincentives/deadlines.py +58 -0
  14. weakincentives/prompt/__init__.py +105 -0
  15. weakincentives/prompt/_generic_params_specializer.py +64 -0
  16. weakincentives/prompt/_normalization.py +48 -0
  17. weakincentives/prompt/_overrides_protocols.py +33 -0
  18. weakincentives/prompt/_types.py +34 -0
  19. weakincentives/prompt/chapter.py +146 -0
  20. weakincentives/prompt/composition.py +281 -0
  21. weakincentives/prompt/errors.py +57 -0
  22. weakincentives/prompt/markdown.py +108 -0
  23. weakincentives/prompt/overrides/__init__.py +59 -0
  24. weakincentives/prompt/overrides/_fs.py +164 -0
  25. weakincentives/prompt/overrides/inspection.py +141 -0
  26. weakincentives/prompt/overrides/local_store.py +275 -0
  27. weakincentives/prompt/overrides/validation.py +534 -0
  28. weakincentives/prompt/overrides/versioning.py +269 -0
  29. weakincentives/prompt/prompt.py +353 -0
  30. weakincentives/prompt/protocols.py +103 -0
  31. weakincentives/prompt/registry.py +375 -0
  32. weakincentives/prompt/rendering.py +288 -0
  33. weakincentives/prompt/response_format.py +60 -0
  34. weakincentives/prompt/section.py +166 -0
  35. weakincentives/prompt/structured_output.py +179 -0
  36. weakincentives/prompt/tool.py +397 -0
  37. weakincentives/prompt/tool_result.py +30 -0
  38. weakincentives/py.typed +0 -0
  39. weakincentives/runtime/__init__.py +82 -0
  40. weakincentives/runtime/events/__init__.py +126 -0
  41. weakincentives/runtime/events/_types.py +110 -0
  42. weakincentives/runtime/logging.py +284 -0
  43. weakincentives/runtime/session/__init__.py +46 -0
  44. weakincentives/runtime/session/_slice_types.py +24 -0
  45. weakincentives/runtime/session/_types.py +55 -0
  46. weakincentives/runtime/session/dataclasses.py +29 -0
  47. weakincentives/runtime/session/protocols.py +34 -0
  48. weakincentives/runtime/session/reducer_context.py +40 -0
  49. weakincentives/runtime/session/reducers.py +82 -0
  50. weakincentives/runtime/session/selectors.py +56 -0
  51. weakincentives/runtime/session/session.py +387 -0
  52. weakincentives/runtime/session/snapshots.py +310 -0
  53. weakincentives/serde/__init__.py +19 -0
  54. weakincentives/serde/_utils.py +240 -0
  55. weakincentives/serde/dataclass_serde.py +55 -0
  56. weakincentives/serde/dump.py +189 -0
  57. weakincentives/serde/parse.py +417 -0
  58. weakincentives/serde/schema.py +260 -0
  59. weakincentives/tools/__init__.py +154 -0
  60. weakincentives/tools/_context.py +38 -0
  61. weakincentives/tools/asteval.py +853 -0
  62. weakincentives/tools/errors.py +26 -0
  63. weakincentives/tools/planning.py +831 -0
  64. weakincentives/tools/podman.py +1655 -0
  65. weakincentives/tools/subagents.py +346 -0
  66. weakincentives/tools/vfs.py +1390 -0
  67. weakincentives/types/__init__.py +35 -0
  68. weakincentives/types/json.py +45 -0
  69. weakincentives-0.9.0.dist-info/METADATA +775 -0
  70. weakincentives-0.9.0.dist-info/RECORD +73 -0
  71. weakincentives-0.9.0.dist-info/WHEEL +4 -0
  72. weakincentives-0.9.0.dist-info/entry_points.txt +2 -0
  73. weakincentives-0.9.0.dist-info/licenses/LICENSE +201 -0
weakincentives/serde/schema.py
@@ -0,0 +1,260 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Dataclass schema generation helpers."""
+
+ # pyright: reportUnknownArgumentType=false, reportUnknownVariableType=false, reportUnknownMemberType=false, reportArgumentType=false, reportUnnecessaryIsInstance=false, reportPrivateUsage=false
+
+ from __future__ import annotations
+
+ import dataclasses
+ from collections.abc import Callable, Iterable, Mapping, Sequence
+ from dataclasses import MISSING
+ from datetime import date, datetime, time
+ from decimal import Decimal
+ from enum import Enum
+ from pathlib import Path
+ from typing import Literal, cast, get_args, get_origin, get_type_hints
+ from uuid import UUID
+
+ from ..types import JSONValue
+ from ._utils import _UNION_TYPE, _AnyType, _merge_annotated_meta, _ordered_values
+
+
+ def _schema_constraints(meta: Mapping[str, object]) -> dict[str, JSONValue]:
+     schema_meta: dict[str, JSONValue] = {}
+     mapping = {
+         "ge": "minimum",
+         "minimum": "minimum",
+         "gt": "exclusiveMinimum",
+         "exclusiveMinimum": "exclusiveMinimum",
+         "le": "maximum",
+         "maximum": "maximum",
+         "lt": "exclusiveMaximum",
+         "exclusiveMaximum": "exclusiveMaximum",
+         "min_length": "minLength",
+         "minLength": "minLength",
+         "max_length": "maxLength",
+         "maxLength": "maxLength",
+         "regex": "pattern",
+         "pattern": "pattern",
+     }
+     for key, target in mapping.items():
+         if key in meta and target not in schema_meta:
+             schema_meta[target] = cast(JSONValue, meta[key])
+     members = meta.get("enum") or meta.get("in")
+     if isinstance(members, Iterable) and not isinstance(members, (str, bytes)):
+         enum_values = _ordered_values(cast(Iterable[JSONValue], members))
+         _ = schema_meta.setdefault("enum", enum_values)
+     not_members = meta.get("not_in")
+     if (
+         isinstance(not_members, Iterable)
+         and not isinstance(not_members, (str, bytes))
+         and "not" not in schema_meta
+     ):
+         schema_meta["not"] = {
+             "enum": _ordered_values(cast(Iterable[JSONValue], not_members))
+         }
+     return schema_meta
+
+
+ def _schema_for_type(
+     typ: object,
+     meta: Mapping[str, object] | None,
+     alias_generator: Callable[[str], str] | None,
+ ) -> dict[str, JSONValue]:
+     base_type, merged_meta = _merge_annotated_meta(typ, meta)
+     origin = get_origin(base_type)
+
+     if base_type is object or base_type is _AnyType:
+         schema_data: dict[str, JSONValue] = {}
+     elif dataclasses.is_dataclass(base_type):
+         dataclass_type = base_type if isinstance(base_type, type) else type(base_type)
+         schema_data = schema(dataclass_type, alias_generator=alias_generator)
+     elif base_type is type(None):
+         schema_data = {"type": "null"}
+     elif isinstance(base_type, type) and issubclass(base_type, Enum):
+         enum_values = [member.value for member in base_type]
+         schema_data = {"enum": enum_values}
+         if enum_values:
+             if all(isinstance(value, str) for value in enum_values):
+                 schema_data["type"] = "string"
+             elif all(isinstance(value, bool) for value in enum_values):
+                 schema_data["type"] = "boolean"
+             elif all(
+                 isinstance(value, int) and not isinstance(value, bool)
+                 for value in enum_values
+             ):
+                 schema_data["type"] = "integer"
+             elif all(isinstance(value, (float, Decimal)) for value in enum_values):
+                 schema_data["type"] = "number"
+     elif base_type is bool:
+         schema_data = {"type": "boolean"}
+     elif base_type is int:
+         schema_data = {"type": "integer"}
+     elif base_type in {float, Decimal}:
+         schema_data = {"type": "number"}
+     elif base_type is str:
+         schema_data = {"type": "string"}
+     elif base_type is datetime:
+         schema_data = {"type": "string", "format": "date-time"}
+     elif base_type is date:
+         schema_data = {"type": "string", "format": "date"}
+     elif base_type is time:
+         schema_data = {"type": "string", "format": "time"}
+     elif base_type is UUID:
+         schema_data = {"type": "string", "format": "uuid"}
+     elif base_type is Path:
+         schema_data = {"type": "string"}
+     elif origin is Literal:
+         literal_values = list(get_args(base_type))
+         schema_data = {"enum": literal_values}
+         if literal_values:
+             if all(isinstance(value, bool) for value in literal_values):
+                 schema_data["type"] = "boolean"
+             elif all(isinstance(value, str) for value in literal_values):
+                 schema_data["type"] = "string"
+             elif all(
+                 isinstance(value, int) and not isinstance(value, bool)
+                 for value in literal_values
+             ):
+                 schema_data["type"] = "integer"
+             elif all(isinstance(value, (float, Decimal)) for value in literal_values):
+                 schema_data["type"] = "number"
+     elif origin in {list, Sequence}:
+         item_type = get_args(base_type)[0] if get_args(base_type) else object
+         schema_data = {
+             "type": "array",
+             "items": _schema_for_type(item_type, None, alias_generator),
+         }
+     elif origin is set:
+         item_type = get_args(base_type)[0] if get_args(base_type) else object
+         schema_data = {
+             "type": "array",
+             "items": _schema_for_type(item_type, None, alias_generator),
+             "uniqueItems": True,
+         }
+     elif origin is tuple:
+         args = get_args(base_type)
+         if args and args[-1] is Ellipsis:
+             schema_data = {
+                 "type": "array",
+                 "items": _schema_for_type(args[0], None, alias_generator),
+             }
+         else:
+             schema_data = {
+                 "type": "array",
+                 "prefixItems": [
+                     _schema_for_type(arg, None, alias_generator) for arg in args
+                 ],
+                 "minItems": len(args),
+                 "maxItems": len(args),
+             }
+     elif origin in {dict, Mapping}:
+         args = get_args(base_type)
+         value_type = args[1] if len(args) == 2 else object
+         schema_data = {
+             "type": "object",
+             "additionalProperties": _schema_for_type(value_type, None, alias_generator),
+         }
+     elif origin is _UNION_TYPE:
+         subschemas = []
+         includes_null = False
+         base_schema_ref: Mapping[str, object] | None = None
+         for arg in get_args(base_type):
+             if arg is type(None):
+                 includes_null = True
+                 continue
+             subschema = _schema_for_type(arg, None, alias_generator)
+             subschemas.append(subschema)
+             if (
+                 base_schema_ref is None
+                 and isinstance(subschema, Mapping)
+                 and subschema.get("type") == "object"
+             ):
+                 base_schema_ref = subschema
+         any_of = list(subschemas)
+         if includes_null:
+             any_of.append({"type": "null"})
+         if base_schema_ref is not None and len(subschemas) == 1:
+             schema_data = dict(base_schema_ref)
+         else:
+             schema_data = {}
+         schema_data["anyOf"] = any_of
+         non_null_types = [
+             subschema.get("type")
+             for subschema in subschemas
+             if isinstance(subschema.get("type"), str)
+             and subschema.get("type") != "null"
+         ]
+         if non_null_types and len(set(non_null_types)) == 1:
+             schema_data["type"] = non_null_types[0]
+         if len(subschemas) == 1 and base_schema_ref is None:
+             title = subschemas[0].get("title")
+             if isinstance(title, str):  # pragma: no cover - not triggered in tests
+                 _ = schema_data.setdefault("title", title)
+             required = subschemas[0].get("required")
+             if isinstance(required, (list, tuple)):  # pragma: no cover - defensive
+                 _ = schema_data.setdefault("required", list(required))
+     else:
+         schema_data = {}
+
+     schema_data.update(_schema_constraints(merged_meta))
+     return schema_data
+
+
+ def schema(
+     cls: type[object],
+     *,
+     alias_generator: Callable[[str], str] | None = None,
+     extra: Literal["ignore", "forbid", "allow"] = "ignore",
+ ) -> dict[str, JSONValue]:
+     """Produce a minimal JSON Schema description for a dataclass."""
+
+     if not dataclasses.is_dataclass(cls) or not isinstance(cls, type):
+         raise TypeError("schema() requires a dataclass type")
+     if extra not in {"ignore", "forbid", "allow"}:
+         raise ValueError("extra must be one of 'ignore', 'forbid', or 'allow'")
+
+     properties: dict[str, dict[str, JSONValue]] = {}
+     required: list[str] = []
+     type_hints = get_type_hints(cls, include_extras=True)
+
+     for field in dataclasses.fields(cls):
+         if not field.init:
+             continue
+         field_meta = dict(field.metadata)
+         alias = field_meta.get("alias")
+         if alias_generator is not None and not alias:
+             alias = alias_generator(field.name)
+         property_name = alias or field.name
+         field_type = type_hints.get(field.name, field.type)
+         properties[property_name] = _schema_for_type(
+             field_type, field_meta, alias_generator
+         )
+         if field.default is MISSING and field.default_factory is MISSING:
+             required.append(property_name)
+
+     schema_dict: dict[str, JSONValue] = {
+         "title": cls.__name__,
+         "type": "object",
+         "properties": properties,
+         "additionalProperties": extra != "forbid",
+     }
+     if required:
+         schema_dict["required"] = required
+     if not required:
+         _ = schema_dict.pop("required", None)
+     return schema_dict
+
+
+ __all__ = ["schema"]
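For orientation, here is a minimal usage sketch of the `schema()` helper added above. It is not taken from the package's documentation: the `User` dataclass is a hypothetical example, the import targets the module path shown in this diff, and the expected output is inferred from reading the code.

```python
from dataclasses import dataclass

from weakincentives.serde.schema import schema  # module path as added in this diff


@dataclass
class User:  # hypothetical example type, not part of the package
    name: str
    age: int = 0


print(schema(User, extra="forbid"))
# Expected shape, inferred from the code above:
# {
#     "title": "User",
#     "type": "object",
#     "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
#     "additionalProperties": False,
#     "required": ["name"],
# }
```

Fields with a default (here `age`) are left out of `required`, and `extra="forbid"` flips `additionalProperties` to `False`.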
weakincentives/tools/__init__.py
@@ -0,0 +1,154 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Public surface for built-in tool suites."""
+
+ # pyright: reportImportCycles=false
+
+ from __future__ import annotations
+
+ from importlib import import_module
+ from typing import TYPE_CHECKING
+
+ from .asteval import (
+     AstevalSection,
+     EvalFileRead,
+     EvalFileWrite,
+     EvalParams,
+     EvalResult,
+ )
+ from .errors import DeadlineExceededError, ToolValidationError
+ from .planning import (
+     AddStep,
+     ClearPlan,
+     MarkStep,
+     NewPlanStep,
+     Plan,
+     PlanningStrategy,
+     PlanningToolsSection,
+     PlanStatus,
+     PlanStep,
+     ReadPlan,
+     SetupPlan,
+     StepStatus,
+     UpdateStep,
+ )
+ from .subagents import (
+     DispatchSubagentsParams,
+     SubagentIsolationLevel,
+     SubagentResult,
+     SubagentsSection,
+     build_dispatch_subagents_tool,
+     dispatch_subagents,
+ )
+
+ if TYPE_CHECKING:
+     from .podman import (
+         PodmanSandboxSection,
+         PodmanShellParams,
+         PodmanShellResult,
+         PodmanWorkspace,
+     )
+     from .vfs import (
+         DeleteEntry,
+         EditFileParams,
+         FileInfo,
+         GlobMatch,
+         GlobParams,
+         GrepMatch,
+         GrepParams,
+         HostMount,
+         ListDirectory,
+         ListDirectoryParams,
+         ListDirectoryResult,
+         ReadFile,
+         ReadFileParams,
+         ReadFileResult,
+         RemoveParams,
+         VfsFile,
+         VfsPath,
+         VfsToolsSection,
+         VirtualFileSystem,
+         WriteFile,
+         WriteFileParams,
+     )
+
+ __all__ = [
+     "AddStep",
+     "AstevalSection",
+     "ClearPlan",
+     "DeadlineExceededError",
+     "DeleteEntry",
+     "DispatchSubagentsParams",
+     "EditFileParams",
+     "EvalFileRead",
+     "EvalFileWrite",
+     "EvalParams",
+     "EvalResult",
+     "FileInfo",
+     "GlobMatch",
+     "GlobParams",
+     "GrepMatch",
+     "GrepParams",
+     "HostMount",
+     "ListDirectory",
+     "ListDirectoryParams",
+     "ListDirectoryResult",
+     "MarkStep",
+     "NewPlanStep",
+     "Plan",
+     "PlanStatus",
+     "PlanStep",
+     "PlanningStrategy",
+     "PlanningToolsSection",
+     "PodmanSandboxSection",
+     "PodmanShellParams",
+     "PodmanShellResult",
+     "PodmanWorkspace",
+     "ReadFile",
+     "ReadFileParams",
+     "ReadFileResult",
+     "ReadPlan",
+     "RemoveParams",
+     "SetupPlan",
+     "StepStatus",
+     "SubagentIsolationLevel",
+     "SubagentResult",
+     "SubagentsSection",
+     "ToolValidationError",
+     "UpdateStep",
+     "VfsFile",
+     "VfsPath",
+     "VfsToolsSection",
+     "VirtualFileSystem",
+     "WriteFile",
+     "WriteFileParams",
+     "build_dispatch_subagents_tool",
+     "dispatch_subagents",
+ ]
+
+ _PODMAN_EXPORTS = {
+     "PodmanShellParams",
+     "PodmanShellResult",
+     "PodmanSandboxSection",
+     "PodmanWorkspace",
+ }
+
+
+ def __getattr__(name: str) -> object:
+     if name in _PODMAN_EXPORTS:
+         module = import_module(f"{__name__}.podman")
+         value = getattr(module, name)
+         globals()[name] = value
+         return value
+     msg = f"module {__name__!r} has no attribute {name!r}"
+     raise AttributeError(msg)
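The module-level `__getattr__` above defers importing `.podman` until one of the `_PODMAN_EXPORTS` names is first accessed (the PEP 562 lazy-import pattern). A hedged sketch of how that behaves at runtime, assuming the package is installed and that none of the eagerly imported submodules pull in `.podman` transitively:

```python
import sys

from weakincentives import tools

# Planning, asteval, and subagent names are imported eagerly with the package.
section_cls = tools.PlanningToolsSection

# The podman names are resolved lazily: under the assumption above, the
# submodule only loads on first attribute access via __getattr__.
print("weakincentives.tools.podman" in sys.modules)  # expected: False
workspace_cls = tools.PodmanWorkspace                # triggers import_module(".podman")
print("weakincentives.tools.podman" in sys.modules)  # expected: True
```

After the first access, `__getattr__` caches the resolved object in `globals()`, so later lookups skip the hook entirely.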
weakincentives/tools/_context.py
@@ -0,0 +1,38 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Shared helpers for validating tool execution context."""
+
+ from __future__ import annotations
+
+ from ..prompt.tool import ToolContext
+ from ..runtime.session import Session
+
+
+ def ensure_context_uses_session(*, context: ToolContext, session: Session) -> None:
+     """Verify ``context`` matches the ``session`` bound to the tool section."""
+
+     if context.session is not session:
+         message = (
+             "ToolContext session does not match the section session. "
+             "Ensure the tool is invoked with the bound session."
+         )
+         raise RuntimeError(message)
+     if context.event_bus is not session.event_bus:
+         message = (
+             "ToolContext event bus does not match the section session bus. "
+             "Ensure the tool is invoked with the bound event bus."
+         )
+         raise RuntimeError(message)
+
+
+ __all__ = ["ensure_context_uses_session"]
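A short illustrative sketch of how a tool implementation might call `ensure_context_uses_session`. The handler name and wiring are hypothetical; only the import paths and the keyword-only signature come from this diff.

```python
from weakincentives.prompt.tool import ToolContext
from weakincentives.runtime.session import Session
from weakincentives.tools._context import ensure_context_uses_session


def run_read_file_tool(context: ToolContext, bound_session: Session) -> None:
    # Hypothetical handler: reject contexts wired to a different session or
    # event bus than the one this tool section was bound to.
    ensure_context_uses_session(context=context, session=bound_session)
    # ... proceed with the actual tool body against bound_session ...
```

The checks use identity comparisons (`is`), so passing an equal-but-distinct Session or event bus still raises `RuntimeError`.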