openplanter 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +210 -0
- package/dist/builder.d.ts +11 -0
- package/dist/builder.d.ts.map +1 -0
- package/dist/builder.js +179 -0
- package/dist/builder.js.map +1 -0
- package/dist/cli.d.ts +9 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +548 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +51 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +114 -0
- package/dist/config.js.map +1 -0
- package/dist/credentials.d.ts +52 -0
- package/dist/credentials.d.ts.map +1 -0
- package/dist/credentials.js +371 -0
- package/dist/credentials.js.map +1 -0
- package/dist/demo.d.ts +26 -0
- package/dist/demo.d.ts.map +1 -0
- package/dist/demo.js +95 -0
- package/dist/demo.js.map +1 -0
- package/dist/engine.d.ts +91 -0
- package/dist/engine.d.ts.map +1 -0
- package/dist/engine.js +1036 -0
- package/dist/engine.js.map +1 -0
- package/dist/index.d.ts +30 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +39 -0
- package/dist/index.js.map +1 -0
- package/dist/investigation-tools/aph-holdings.d.ts +61 -0
- package/dist/investigation-tools/aph-holdings.d.ts.map +1 -0
- package/dist/investigation-tools/aph-holdings.js +459 -0
- package/dist/investigation-tools/aph-holdings.js.map +1 -0
- package/dist/investigation-tools/asic-officer-lookup.d.ts +42 -0
- package/dist/investigation-tools/asic-officer-lookup.d.ts.map +1 -0
- package/dist/investigation-tools/asic-officer-lookup.js +197 -0
- package/dist/investigation-tools/asic-officer-lookup.js.map +1 -0
- package/dist/investigation-tools/asx-calendar-fetcher.d.ts +42 -0
- package/dist/investigation-tools/asx-calendar-fetcher.d.ts.map +1 -0
- package/dist/investigation-tools/asx-calendar-fetcher.js +271 -0
- package/dist/investigation-tools/asx-calendar-fetcher.js.map +1 -0
- package/dist/investigation-tools/asx-parser.d.ts +66 -0
- package/dist/investigation-tools/asx-parser.d.ts.map +1 -0
- package/dist/investigation-tools/asx-parser.js +314 -0
- package/dist/investigation-tools/asx-parser.js.map +1 -0
- package/dist/investigation-tools/bulk-asx-announcements.d.ts +53 -0
- package/dist/investigation-tools/bulk-asx-announcements.d.ts.map +1 -0
- package/dist/investigation-tools/bulk-asx-announcements.js +204 -0
- package/dist/investigation-tools/bulk-asx-announcements.js.map +1 -0
- package/dist/investigation-tools/entity-resolver.d.ts +77 -0
- package/dist/investigation-tools/entity-resolver.d.ts.map +1 -0
- package/dist/investigation-tools/entity-resolver.js +346 -0
- package/dist/investigation-tools/entity-resolver.js.map +1 -0
- package/dist/investigation-tools/hotcopper-scraper.d.ts +73 -0
- package/dist/investigation-tools/hotcopper-scraper.d.ts.map +1 -0
- package/dist/investigation-tools/hotcopper-scraper.js +318 -0
- package/dist/investigation-tools/hotcopper-scraper.js.map +1 -0
- package/dist/investigation-tools/index.d.ts +15 -0
- package/dist/investigation-tools/index.d.ts.map +1 -0
- package/dist/investigation-tools/index.js +15 -0
- package/dist/investigation-tools/index.js.map +1 -0
- package/dist/investigation-tools/insider-graph.d.ts +173 -0
- package/dist/investigation-tools/insider-graph.d.ts.map +1 -0
- package/dist/investigation-tools/insider-graph.js +732 -0
- package/dist/investigation-tools/insider-graph.js.map +1 -0
- package/dist/investigation-tools/insider-suspicion-scorer.d.ts +97 -0
- package/dist/investigation-tools/insider-suspicion-scorer.d.ts.map +1 -0
- package/dist/investigation-tools/insider-suspicion-scorer.js +327 -0
- package/dist/investigation-tools/insider-suspicion-scorer.js.map +1 -0
- package/dist/investigation-tools/multi-forum-scraper.d.ts +104 -0
- package/dist/investigation-tools/multi-forum-scraper.d.ts.map +1 -0
- package/dist/investigation-tools/multi-forum-scraper.js +415 -0
- package/dist/investigation-tools/multi-forum-scraper.js.map +1 -0
- package/dist/investigation-tools/price-fetcher.d.ts +81 -0
- package/dist/investigation-tools/price-fetcher.d.ts.map +1 -0
- package/dist/investigation-tools/price-fetcher.js +268 -0
- package/dist/investigation-tools/price-fetcher.js.map +1 -0
- package/dist/investigation-tools/shared.d.ts +39 -0
- package/dist/investigation-tools/shared.d.ts.map +1 -0
- package/dist/investigation-tools/shared.js +203 -0
- package/dist/investigation-tools/shared.js.map +1 -0
- package/dist/investigation-tools/timeline-linker.d.ts +90 -0
- package/dist/investigation-tools/timeline-linker.d.ts.map +1 -0
- package/dist/investigation-tools/timeline-linker.js +219 -0
- package/dist/investigation-tools/timeline-linker.js.map +1 -0
- package/dist/investigation-tools/volume-scanner.d.ts +70 -0
- package/dist/investigation-tools/volume-scanner.d.ts.map +1 -0
- package/dist/investigation-tools/volume-scanner.js +227 -0
- package/dist/investigation-tools/volume-scanner.js.map +1 -0
- package/dist/model.d.ts +136 -0
- package/dist/model.d.ts.map +1 -0
- package/dist/model.js +1071 -0
- package/dist/model.js.map +1 -0
- package/dist/patching.d.ts +45 -0
- package/dist/patching.d.ts.map +1 -0
- package/dist/patching.js +317 -0
- package/dist/patching.js.map +1 -0
- package/dist/prompts.d.ts +15 -0
- package/dist/prompts.d.ts.map +1 -0
- package/dist/prompts.js +351 -0
- package/dist/prompts.js.map +1 -0
- package/dist/replay-log.d.ts +54 -0
- package/dist/replay-log.d.ts.map +1 -0
- package/dist/replay-log.js +94 -0
- package/dist/replay-log.js.map +1 -0
- package/dist/runtime.d.ts +53 -0
- package/dist/runtime.d.ts.map +1 -0
- package/dist/runtime.js +259 -0
- package/dist/runtime.js.map +1 -0
- package/dist/settings.d.ts +39 -0
- package/dist/settings.d.ts.map +1 -0
- package/dist/settings.js +146 -0
- package/dist/settings.js.map +1 -0
- package/dist/tool-defs.d.ts +58 -0
- package/dist/tool-defs.d.ts.map +1 -0
- package/dist/tool-defs.js +1029 -0
- package/dist/tool-defs.js.map +1 -0
- package/dist/tools.d.ts +72 -0
- package/dist/tools.d.ts.map +1 -0
- package/dist/tools.js +1454 -0
- package/dist/tools.js.map +1 -0
- package/dist/tui.d.ts +49 -0
- package/dist/tui.d.ts.map +1 -0
- package/dist/tui.js +699 -0
- package/dist/tui.js.map +1 -0
- package/package.json +126 -0
|
@@ -0,0 +1,1029 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Provider-neutral tool definitions for the OpenPlanter agent.
|
|
3
|
+
*
|
|
4
|
+
* Single source of truth for tool schemas. Converter helpers produce the
|
|
5
|
+
* provider-specific shapes expected by OpenAI and Anthropic APIs.
|
|
6
|
+
*/
|
|
7
|
+
// ---------------------------------------------------------------------------
|
|
8
|
+
// Tool definitions
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
export const TOOL_DEFINITIONS = [
|
|
11
|
+
{
|
|
12
|
+
name: "list_files",
|
|
13
|
+
description: "List files in the workspace directory. Optionally filter with a glob pattern.",
|
|
14
|
+
parameters: {
|
|
15
|
+
type: "object",
|
|
16
|
+
properties: {
|
|
17
|
+
glob: {
|
|
18
|
+
type: "string",
|
|
19
|
+
description: "Optional glob pattern to filter files.",
|
|
20
|
+
},
|
|
21
|
+
},
|
|
22
|
+
required: [],
|
|
23
|
+
additionalProperties: false,
|
|
24
|
+
},
|
|
25
|
+
},
|
|
26
|
+
{
|
|
27
|
+
name: "search_files",
|
|
28
|
+
description: "Search file contents in the workspace for a text or regex query.",
|
|
29
|
+
parameters: {
|
|
30
|
+
type: "object",
|
|
31
|
+
properties: {
|
|
32
|
+
query: {
|
|
33
|
+
type: "string",
|
|
34
|
+
description: "Text or regex to search for.",
|
|
35
|
+
},
|
|
36
|
+
glob: {
|
|
37
|
+
type: "string",
|
|
38
|
+
description: "Optional glob pattern to restrict which files are searched.",
|
|
39
|
+
},
|
|
40
|
+
},
|
|
41
|
+
required: ["query"],
|
|
42
|
+
additionalProperties: false,
|
|
43
|
+
},
|
|
44
|
+
},
|
|
45
|
+
{
|
|
46
|
+
name: "repo_map",
|
|
47
|
+
description: "Build a lightweight map of source files and symbols to speed up code navigation.",
|
|
48
|
+
parameters: {
|
|
49
|
+
type: "object",
|
|
50
|
+
properties: {
|
|
51
|
+
glob: {
|
|
52
|
+
type: "string",
|
|
53
|
+
description: "Optional glob pattern to limit which files are scanned.",
|
|
54
|
+
},
|
|
55
|
+
max_files: {
|
|
56
|
+
type: "integer",
|
|
57
|
+
description: "Maximum number of files to scan (1-500, default 200).",
|
|
58
|
+
},
|
|
59
|
+
},
|
|
60
|
+
required: [],
|
|
61
|
+
additionalProperties: false,
|
|
62
|
+
},
|
|
63
|
+
},
|
|
64
|
+
{
|
|
65
|
+
name: "web_search",
|
|
66
|
+
description: "Search the web using the Exa API. Returns URLs, titles, and optional page text.",
|
|
67
|
+
parameters: {
|
|
68
|
+
type: "object",
|
|
69
|
+
properties: {
|
|
70
|
+
query: {
|
|
71
|
+
type: "string",
|
|
72
|
+
description: "Web search query string.",
|
|
73
|
+
},
|
|
74
|
+
num_results: {
|
|
75
|
+
type: "integer",
|
|
76
|
+
description: "Number of results to return (1-20, default 10).",
|
|
77
|
+
},
|
|
78
|
+
include_text: {
|
|
79
|
+
type: "boolean",
|
|
80
|
+
description: "Whether to include page text in results.",
|
|
81
|
+
},
|
|
82
|
+
},
|
|
83
|
+
required: ["query"],
|
|
84
|
+
additionalProperties: false,
|
|
85
|
+
},
|
|
86
|
+
},
|
|
87
|
+
{
|
|
88
|
+
name: "fetch_url",
|
|
89
|
+
description: "Fetch and return the text content of one or more URLs.",
|
|
90
|
+
parameters: {
|
|
91
|
+
type: "object",
|
|
92
|
+
properties: {
|
|
93
|
+
urls: {
|
|
94
|
+
type: "array",
|
|
95
|
+
items: { type: "string" },
|
|
96
|
+
description: "List of URLs to fetch.",
|
|
97
|
+
},
|
|
98
|
+
},
|
|
99
|
+
required: ["urls"],
|
|
100
|
+
additionalProperties: false,
|
|
101
|
+
},
|
|
102
|
+
},
|
|
103
|
+
{
|
|
104
|
+
name: "read_file",
|
|
105
|
+
description: "Read the contents of a file in the workspace. Lines are numbered LINE:HASH|content by default for use with hashline_edit. Set hashline=false for plain N|content.",
|
|
106
|
+
parameters: {
|
|
107
|
+
type: "object",
|
|
108
|
+
properties: {
|
|
109
|
+
path: {
|
|
110
|
+
type: "string",
|
|
111
|
+
description: "Relative or absolute path within the workspace.",
|
|
112
|
+
},
|
|
113
|
+
hashline: {
|
|
114
|
+
type: "boolean",
|
|
115
|
+
description: "Prefix each line with LINE:HASH| format for content verification. Default true.",
|
|
116
|
+
},
|
|
117
|
+
},
|
|
118
|
+
required: ["path"],
|
|
119
|
+
additionalProperties: false,
|
|
120
|
+
},
|
|
121
|
+
},
|
|
122
|
+
{
|
|
123
|
+
name: "write_file",
|
|
124
|
+
description: "Create or overwrite a file in the workspace with the given content.",
|
|
125
|
+
parameters: {
|
|
126
|
+
type: "object",
|
|
127
|
+
properties: {
|
|
128
|
+
path: {
|
|
129
|
+
type: "string",
|
|
130
|
+
description: "Relative path for the file.",
|
|
131
|
+
},
|
|
132
|
+
content: {
|
|
133
|
+
type: "string",
|
|
134
|
+
description: "Full file content to write.",
|
|
135
|
+
},
|
|
136
|
+
},
|
|
137
|
+
required: ["path", "content"],
|
|
138
|
+
additionalProperties: false,
|
|
139
|
+
},
|
|
140
|
+
},
|
|
141
|
+
{
|
|
142
|
+
name: "apply_patch",
|
|
143
|
+
description: "Apply a Codex-style patch to one or more files. " +
|
|
144
|
+
"Use the *** Begin Patch / *** End Patch format with " +
|
|
145
|
+
"Update File, Add File, and Delete File operations.",
|
|
146
|
+
parameters: {
|
|
147
|
+
type: "object",
|
|
148
|
+
properties: {
|
|
149
|
+
patch: {
|
|
150
|
+
type: "string",
|
|
151
|
+
description: "The full patch block in Codex patch format.",
|
|
152
|
+
},
|
|
153
|
+
},
|
|
154
|
+
required: ["patch"],
|
|
155
|
+
additionalProperties: false,
|
|
156
|
+
},
|
|
157
|
+
},
|
|
158
|
+
{
|
|
159
|
+
name: "edit_file",
|
|
160
|
+
description: "Replace a specific text span in a file. Provide the exact old text " +
|
|
161
|
+
"to find and the new text to replace it with. The old text must appear " +
|
|
162
|
+
"exactly once in the file.",
|
|
163
|
+
parameters: {
|
|
164
|
+
type: "object",
|
|
165
|
+
properties: {
|
|
166
|
+
path: {
|
|
167
|
+
type: "string",
|
|
168
|
+
description: "Relative path to the file to edit.",
|
|
169
|
+
},
|
|
170
|
+
old_text: {
|
|
171
|
+
type: "string",
|
|
172
|
+
description: "The exact text to find and replace.",
|
|
173
|
+
},
|
|
174
|
+
new_text: {
|
|
175
|
+
type: "string",
|
|
176
|
+
description: "The replacement text.",
|
|
177
|
+
},
|
|
178
|
+
},
|
|
179
|
+
required: ["path", "old_text", "new_text"],
|
|
180
|
+
additionalProperties: false,
|
|
181
|
+
},
|
|
182
|
+
},
|
|
183
|
+
{
|
|
184
|
+
name: "hashline_edit",
|
|
185
|
+
description: "Edit a file using hash-anchored line references from read_file(hashline=true). " +
|
|
186
|
+
"Operations: set_line (replace one line), replace_lines (replace a range), " +
|
|
187
|
+
"insert_after (insert new lines after an anchor).",
|
|
188
|
+
parameters: {
|
|
189
|
+
type: "object",
|
|
190
|
+
properties: {
|
|
191
|
+
path: {
|
|
192
|
+
type: "string",
|
|
193
|
+
description: "Relative path to the file.",
|
|
194
|
+
},
|
|
195
|
+
edits: {
|
|
196
|
+
type: "array",
|
|
197
|
+
items: {
|
|
198
|
+
type: "object",
|
|
199
|
+
properties: {
|
|
200
|
+
set_line: {
|
|
201
|
+
type: "string",
|
|
202
|
+
description: "Anchor 'N:HH' for single-line replace.",
|
|
203
|
+
},
|
|
204
|
+
replace_lines: {
|
|
205
|
+
type: "object",
|
|
206
|
+
description: "Range with 'start' and 'end' anchors.",
|
|
207
|
+
properties: {
|
|
208
|
+
start: { type: "string" },
|
|
209
|
+
end: { type: "string" },
|
|
210
|
+
},
|
|
211
|
+
required: ["start", "end"],
|
|
212
|
+
additionalProperties: false,
|
|
213
|
+
},
|
|
214
|
+
insert_after: {
|
|
215
|
+
type: "string",
|
|
216
|
+
description: "Anchor 'N:HH' to insert after.",
|
|
217
|
+
},
|
|
218
|
+
content: {
|
|
219
|
+
type: "string",
|
|
220
|
+
description: "New content for the operation.",
|
|
221
|
+
},
|
|
222
|
+
},
|
|
223
|
+
required: [],
|
|
224
|
+
additionalProperties: false,
|
|
225
|
+
},
|
|
226
|
+
description: "Edit operations: set_line, replace_lines, or insert_after.",
|
|
227
|
+
},
|
|
228
|
+
},
|
|
229
|
+
required: ["path", "edits"],
|
|
230
|
+
additionalProperties: false,
|
|
231
|
+
},
|
|
232
|
+
},
|
|
233
|
+
{
|
|
234
|
+
name: "run_shell",
|
|
235
|
+
description: "Execute a shell command from the workspace root and return its output.",
|
|
236
|
+
parameters: {
|
|
237
|
+
type: "object",
|
|
238
|
+
properties: {
|
|
239
|
+
command: {
|
|
240
|
+
type: "string",
|
|
241
|
+
description: "Shell command to execute.",
|
|
242
|
+
},
|
|
243
|
+
timeout: {
|
|
244
|
+
type: "integer",
|
|
245
|
+
description: "Timeout in seconds for this command (default: agent default, max: 600).",
|
|
246
|
+
},
|
|
247
|
+
},
|
|
248
|
+
required: ["command"],
|
|
249
|
+
additionalProperties: false,
|
|
250
|
+
},
|
|
251
|
+
},
|
|
252
|
+
{
|
|
253
|
+
name: "run_shell_bg",
|
|
254
|
+
description: "Start a shell command in the background. Returns a job ID to check or kill later.",
|
|
255
|
+
parameters: {
|
|
256
|
+
type: "object",
|
|
257
|
+
properties: {
|
|
258
|
+
command: {
|
|
259
|
+
type: "string",
|
|
260
|
+
description: "Shell command to run in the background.",
|
|
261
|
+
},
|
|
262
|
+
},
|
|
263
|
+
required: ["command"],
|
|
264
|
+
additionalProperties: false,
|
|
265
|
+
},
|
|
266
|
+
},
|
|
267
|
+
{
|
|
268
|
+
name: "check_shell_bg",
|
|
269
|
+
description: "Check the status and output of a background job started with run_shell_bg.",
|
|
270
|
+
parameters: {
|
|
271
|
+
type: "object",
|
|
272
|
+
properties: {
|
|
273
|
+
job_id: {
|
|
274
|
+
type: "integer",
|
|
275
|
+
description: "The job ID returned by run_shell_bg.",
|
|
276
|
+
},
|
|
277
|
+
},
|
|
278
|
+
required: ["job_id"],
|
|
279
|
+
additionalProperties: false,
|
|
280
|
+
},
|
|
281
|
+
},
|
|
282
|
+
{
|
|
283
|
+
name: "kill_shell_bg",
|
|
284
|
+
description: "Kill a background job started with run_shell_bg.",
|
|
285
|
+
parameters: {
|
|
286
|
+
type: "object",
|
|
287
|
+
properties: {
|
|
288
|
+
job_id: {
|
|
289
|
+
type: "integer",
|
|
290
|
+
description: "The job ID returned by run_shell_bg.",
|
|
291
|
+
},
|
|
292
|
+
},
|
|
293
|
+
required: ["job_id"],
|
|
294
|
+
additionalProperties: false,
|
|
295
|
+
},
|
|
296
|
+
},
|
|
297
|
+
{
|
|
298
|
+
name: "think",
|
|
299
|
+
description: "Record an internal planning thought. Use this to reason about the task before acting.",
|
|
300
|
+
parameters: {
|
|
301
|
+
type: "object",
|
|
302
|
+
properties: {
|
|
303
|
+
note: {
|
|
304
|
+
type: "string",
|
|
305
|
+
description: "Your planning thought or reasoning note.",
|
|
306
|
+
},
|
|
307
|
+
},
|
|
308
|
+
required: ["note"],
|
|
309
|
+
additionalProperties: false,
|
|
310
|
+
},
|
|
311
|
+
},
|
|
312
|
+
{
|
|
313
|
+
name: "subtask",
|
|
314
|
+
description: "Spawn a recursive sub-agent to solve a smaller sub-problem. The result is returned as an observation.",
|
|
315
|
+
parameters: {
|
|
316
|
+
type: "object",
|
|
317
|
+
properties: {
|
|
318
|
+
objective: {
|
|
319
|
+
type: "string",
|
|
320
|
+
description: "Clear objective for the sub-agent to accomplish.",
|
|
321
|
+
},
|
|
322
|
+
model: {
|
|
323
|
+
type: "string",
|
|
324
|
+
description: "Optional model for subtask (e.g. 'claude-sonnet-4-5-20250929', 'claude-haiku-4-5-20251001').",
|
|
325
|
+
},
|
|
326
|
+
reasoning_effort: {
|
|
327
|
+
type: "string",
|
|
328
|
+
enum: ["xhigh", "high", "medium", "low"],
|
|
329
|
+
description: "Optional reasoning effort for the subtask model. For OpenAI codex models, this controls the delegation level.",
|
|
330
|
+
},
|
|
331
|
+
acceptance_criteria: {
|
|
332
|
+
type: "string",
|
|
333
|
+
description: "Acceptance criteria for judging the subtask result. A lightweight judge evaluates the result against these criteria and appends PASS/FAIL to your observation. Be specific and verifiable.",
|
|
334
|
+
},
|
|
335
|
+
},
|
|
336
|
+
required: ["objective", "acceptance_criteria"],
|
|
337
|
+
additionalProperties: false,
|
|
338
|
+
},
|
|
339
|
+
},
|
|
340
|
+
{
|
|
341
|
+
name: "execute",
|
|
342
|
+
description: "Hand an atomic sub-problem to a leaf executor agent with full tool access. " +
|
|
343
|
+
"Use this when the sub-problem requires no further decomposition and can be " +
|
|
344
|
+
"solved directly (e.g. write a file, run tests, apply a patch). The executor " +
|
|
345
|
+
"has no subtask or execute tools — it must solve the objective in one pass.",
|
|
346
|
+
parameters: {
|
|
347
|
+
type: "object",
|
|
348
|
+
properties: {
|
|
349
|
+
objective: {
|
|
350
|
+
type: "string",
|
|
351
|
+
description: "Clear, specific objective for the executor to accomplish.",
|
|
352
|
+
},
|
|
353
|
+
acceptance_criteria: {
|
|
354
|
+
type: "string",
|
|
355
|
+
description: "Acceptance criteria for judging the executor result. A lightweight judge evaluates the result against these criteria and appends PASS/FAIL to your observation. Be specific and verifiable.",
|
|
356
|
+
},
|
|
357
|
+
},
|
|
358
|
+
required: ["objective", "acceptance_criteria"],
|
|
359
|
+
additionalProperties: false,
|
|
360
|
+
},
|
|
361
|
+
},
|
|
362
|
+
{
|
|
363
|
+
name: "list_artifacts",
|
|
364
|
+
description: "List artifacts from previous subagent runs. Returns ID, objective, and result summary for each.",
|
|
365
|
+
parameters: {
|
|
366
|
+
type: "object",
|
|
367
|
+
properties: {},
|
|
368
|
+
required: [],
|
|
369
|
+
additionalProperties: false,
|
|
370
|
+
},
|
|
371
|
+
},
|
|
372
|
+
{
|
|
373
|
+
name: "read_artifact",
|
|
374
|
+
description: "Read a previous subagent's conversation log artifact. " +
|
|
375
|
+
"Returns JSONL records of the subagent's full conversation.",
|
|
376
|
+
parameters: {
|
|
377
|
+
type: "object",
|
|
378
|
+
properties: {
|
|
379
|
+
artifact_id: {
|
|
380
|
+
type: "string",
|
|
381
|
+
description: "Artifact ID from list_artifacts.",
|
|
382
|
+
},
|
|
383
|
+
offset: {
|
|
384
|
+
type: "integer",
|
|
385
|
+
description: "Start line (0-indexed). Default 0.",
|
|
386
|
+
},
|
|
387
|
+
limit: {
|
|
388
|
+
type: "integer",
|
|
389
|
+
description: "Max lines to return. Default 100.",
|
|
390
|
+
},
|
|
391
|
+
},
|
|
392
|
+
required: ["artifact_id"],
|
|
393
|
+
additionalProperties: false,
|
|
394
|
+
},
|
|
395
|
+
},
|
|
396
|
+
// -------------------------------------------------------------------------
|
|
397
|
+
// Domain-specific tools (Australian insider-trading investigation toolkit)
|
|
398
|
+
// -------------------------------------------------------------------------
|
|
399
|
+
{
|
|
400
|
+
name: "aph_holdings",
|
|
401
|
+
description: "Scrape the Australian Parliament House Register of Members' / Senators' " +
|
|
402
|
+
"Interests to extract politician shareholdings and financial interests.",
|
|
403
|
+
parameters: {
|
|
404
|
+
type: "object",
|
|
405
|
+
properties: {
|
|
406
|
+
member: {
|
|
407
|
+
type: "string",
|
|
408
|
+
description: "Member name to filter (case-insensitive substring), or 'all' (default).",
|
|
409
|
+
},
|
|
410
|
+
chamber: {
|
|
411
|
+
type: "string",
|
|
412
|
+
enum: ["house", "senate", "both"],
|
|
413
|
+
description: "Which chamber to scrape (default: both).",
|
|
414
|
+
},
|
|
415
|
+
cache_dir: {
|
|
416
|
+
type: "string",
|
|
417
|
+
description: "Directory to cache fetched pages (optional).",
|
|
418
|
+
},
|
|
419
|
+
test: {
|
|
420
|
+
type: "boolean",
|
|
421
|
+
description: "Run in test mode with embedded sample data (no network).",
|
|
422
|
+
},
|
|
423
|
+
},
|
|
424
|
+
required: [],
|
|
425
|
+
additionalProperties: false,
|
|
426
|
+
},
|
|
427
|
+
},
|
|
428
|
+
{
|
|
429
|
+
name: "asx_parser",
|
|
430
|
+
description: "Parse ASX announcements (Appendix 3Y director trades, 4C cashflow reports, " +
|
|
431
|
+
"trading halts) from HTML or PDF into structured JSON.",
|
|
432
|
+
parameters: {
|
|
433
|
+
type: "object",
|
|
434
|
+
properties: {
|
|
435
|
+
input: {
|
|
436
|
+
type: "string",
|
|
437
|
+
description: "Path or URL to the announcement file (HTML or PDF).",
|
|
438
|
+
},
|
|
439
|
+
type: {
|
|
440
|
+
type: "string",
|
|
441
|
+
enum: ["3y", "4c", "halt", "auto"],
|
|
442
|
+
description: "Announcement type (default: auto-detect).",
|
|
443
|
+
},
|
|
444
|
+
test: {
|
|
445
|
+
type: "boolean",
|
|
446
|
+
description: "Run built-in test mode with sample data.",
|
|
447
|
+
},
|
|
448
|
+
},
|
|
449
|
+
required: [],
|
|
450
|
+
additionalProperties: false,
|
|
451
|
+
},
|
|
452
|
+
},
|
|
453
|
+
{
|
|
454
|
+
name: "entity_resolver",
|
|
455
|
+
description: "Fuzzy entity matching and normalization for Australian politicians, " +
|
|
456
|
+
"ASX companies, directors, ABNs and tickers. Supports normalize, match, and resolve modes.",
|
|
457
|
+
parameters: {
|
|
458
|
+
type: "object",
|
|
459
|
+
properties: {
|
|
460
|
+
mode: {
|
|
461
|
+
type: "string",
|
|
462
|
+
enum: ["normalize", "match", "resolve"],
|
|
463
|
+
description: "Operation mode: normalize (clean names), match (find best matches), resolve (cluster duplicates).",
|
|
464
|
+
},
|
|
465
|
+
input: {
|
|
466
|
+
type: "string",
|
|
467
|
+
description: "Path to JSON file containing entities to process.",
|
|
468
|
+
},
|
|
469
|
+
reference: {
|
|
470
|
+
type: "string",
|
|
471
|
+
description: "Path to JSON file with reference entities (required for match mode).",
|
|
472
|
+
},
|
|
473
|
+
threshold: {
|
|
474
|
+
type: "integer",
|
|
475
|
+
description: "Fuzzy match score threshold 0-100 (default: 80).",
|
|
476
|
+
},
|
|
477
|
+
test: {
|
|
478
|
+
type: "boolean",
|
|
479
|
+
description: "Run built-in self-test with sample Australian data.",
|
|
480
|
+
},
|
|
481
|
+
},
|
|
482
|
+
required: [],
|
|
483
|
+
additionalProperties: false,
|
|
484
|
+
},
|
|
485
|
+
},
|
|
486
|
+
{
|
|
487
|
+
name: "hotcopper_scraper",
|
|
488
|
+
description: "Scrape HotCopper stock forum for posts related to an ASX ticker. " +
|
|
489
|
+
"Extracts sentiment, rumor flags, and post metadata for investigation.",
|
|
490
|
+
parameters: {
|
|
491
|
+
type: "object",
|
|
492
|
+
properties: {
|
|
493
|
+
ticker: {
|
|
494
|
+
type: "string",
|
|
495
|
+
description: "ASX ticker symbol (e.g. BHP, CBA, RIO).",
|
|
496
|
+
},
|
|
497
|
+
days: {
|
|
498
|
+
type: "integer",
|
|
499
|
+
description: "Look-back period in days (default: 7).",
|
|
500
|
+
},
|
|
501
|
+
pages: {
|
|
502
|
+
type: "integer",
|
|
503
|
+
description: "Maximum discussion list pages to scrape (default: 5).",
|
|
504
|
+
},
|
|
505
|
+
format: {
|
|
506
|
+
type: "string",
|
|
507
|
+
enum: ["json", "csv"],
|
|
508
|
+
description: "Output format (default: json).",
|
|
509
|
+
},
|
|
510
|
+
test: {
|
|
511
|
+
type: "boolean",
|
|
512
|
+
description: "Run in test mode with embedded sample data (no network).",
|
|
513
|
+
},
|
|
514
|
+
},
|
|
515
|
+
required: ["ticker"],
|
|
516
|
+
additionalProperties: false,
|
|
517
|
+
},
|
|
518
|
+
},
|
|
519
|
+
{
|
|
520
|
+
name: "insider_graph",
|
|
521
|
+
description: "Build directed graphs of people, companies, events, and tickers with relationship edges. " +
|
|
522
|
+
"Supports pathfinding, neighbourhood queries, clustering, and suspicion-path analysis.",
|
|
523
|
+
parameters: {
|
|
524
|
+
type: "object",
|
|
525
|
+
properties: {
|
|
526
|
+
input: {
|
|
527
|
+
type: "string",
|
|
528
|
+
description: "Input JSON file with nodes and edges.",
|
|
529
|
+
},
|
|
530
|
+
mode: {
|
|
531
|
+
type: "string",
|
|
532
|
+
enum: ["build", "query", "export"],
|
|
533
|
+
description: "Operation mode (default: build).",
|
|
534
|
+
},
|
|
535
|
+
find_path: {
|
|
536
|
+
type: "array",
|
|
537
|
+
items: { type: "string" },
|
|
538
|
+
description: "Two node IDs to find shortest path between [source, target].",
|
|
539
|
+
},
|
|
540
|
+
connections: {
|
|
541
|
+
type: "string",
|
|
542
|
+
description: "Node ID to find all connections within depth hops.",
|
|
543
|
+
},
|
|
544
|
+
depth: {
|
|
545
|
+
type: "integer",
|
|
546
|
+
description: "Hop depth for connections query (default: 2).",
|
|
547
|
+
},
|
|
548
|
+
clusters: {
|
|
549
|
+
type: "boolean",
|
|
550
|
+
description: "Find connected components / communities.",
|
|
551
|
+
},
|
|
552
|
+
suspicion: {
|
|
553
|
+
type: "string",
|
|
554
|
+
description: "Trade event ID to trace back to information sources.",
|
|
555
|
+
},
|
|
556
|
+
export_format: {
|
|
557
|
+
type: "string",
|
|
558
|
+
enum: ["json", "graphml", "dot"],
|
|
559
|
+
description: "Export format (default: json).",
|
|
560
|
+
},
|
|
561
|
+
stats: {
|
|
562
|
+
type: "boolean",
|
|
563
|
+
description: "Print graph statistics.",
|
|
564
|
+
},
|
|
565
|
+
test: {
|
|
566
|
+
type: "boolean",
|
|
567
|
+
description: "Run built-in test scenario.",
|
|
568
|
+
},
|
|
569
|
+
},
|
|
570
|
+
required: [],
|
|
571
|
+
additionalProperties: false,
|
|
572
|
+
},
|
|
573
|
+
},
|
|
574
|
+
{
|
|
575
|
+
name: "price_fetcher",
|
|
576
|
+
description: "Fetch ASX OHLCV price data from Yahoo Finance, compute derived metrics, " +
|
|
577
|
+
"and flag trading anomalies (volume spikes, price surges, gaps, volatility).",
|
|
578
|
+
parameters: {
|
|
579
|
+
type: "object",
|
|
580
|
+
properties: {
|
|
581
|
+
tickers: {
|
|
582
|
+
type: "string",
|
|
583
|
+
description: "Comma-separated ASX ticker symbols (e.g. BHP,RIO,CBA).",
|
|
584
|
+
},
|
|
585
|
+
period: {
|
|
586
|
+
type: "string",
|
|
587
|
+
description: "Look-back period (1mo, 3mo, 6mo, 1y, etc.). Default: 3mo.",
|
|
588
|
+
},
|
|
589
|
+
interval: {
|
|
590
|
+
type: "string",
|
|
591
|
+
description: "Bar interval (1d, 1wk, etc.). Default: 1d.",
|
|
592
|
+
},
|
|
593
|
+
format: {
|
|
594
|
+
type: "string",
|
|
595
|
+
enum: ["csv", "json"],
|
|
596
|
+
description: "Output format (default: csv).",
|
|
597
|
+
},
|
|
598
|
+
anomalies_only: {
|
|
599
|
+
type: "boolean",
|
|
600
|
+
description: "Output only rows with anomaly flags.",
|
|
601
|
+
},
|
|
602
|
+
summary: {
|
|
603
|
+
type: "boolean",
|
|
604
|
+
description: "Print per-ticker summary instead of row-level data.",
|
|
605
|
+
},
|
|
606
|
+
test: {
|
|
607
|
+
type: "boolean",
|
|
608
|
+
description: "Use synthetic data for offline demo (no network).",
|
|
609
|
+
},
|
|
610
|
+
},
|
|
611
|
+
required: [],
|
|
612
|
+
additionalProperties: false,
|
|
613
|
+
},
|
|
614
|
+
},
|
|
615
|
+
{
|
|
616
|
+
name: "timeline_linker",
|
|
617
|
+
description: "Correlate politician/insider trade dates against ASX announcements and events " +
|
|
618
|
+
"to build evidence chains with suspicion scores for insider-trading investigations.",
|
|
619
|
+
parameters: {
|
|
620
|
+
type: "object",
|
|
621
|
+
properties: {
|
|
622
|
+
trades: {
|
|
623
|
+
type: "string",
|
|
624
|
+
description: "Path to JSON file containing trade events.",
|
|
625
|
+
},
|
|
626
|
+
events: {
|
|
627
|
+
type: "string",
|
|
628
|
+
description: "Path to JSON file containing announcements/reports.",
|
|
629
|
+
},
|
|
630
|
+
window: {
|
|
631
|
+
type: "integer",
|
|
632
|
+
description: "Days after trade to search for correlated events (default: 14).",
|
|
633
|
+
},
|
|
634
|
+
min_score: {
|
|
635
|
+
type: "number",
|
|
636
|
+
description: "Minimum suspicion score to include (default: 0).",
|
|
637
|
+
},
|
|
638
|
+
date_from: {
|
|
639
|
+
type: "string",
|
|
640
|
+
description: "Only analyse trades on or after this date (YYYY-MM-DD).",
|
|
641
|
+
},
|
|
642
|
+
date_to: {
|
|
643
|
+
type: "string",
|
|
644
|
+
description: "Only analyse trades on or before this date (YYYY-MM-DD).",
|
|
645
|
+
},
|
|
646
|
+
summary: {
|
|
647
|
+
type: "boolean",
|
|
648
|
+
description: "Print a high-level summary.",
|
|
649
|
+
},
|
|
650
|
+
test: {
|
|
651
|
+
type: "boolean",
|
|
652
|
+
description: "Run with embedded sample data demonstrating suspicious patterns.",
|
|
653
|
+
},
|
|
654
|
+
},
|
|
655
|
+
required: [],
|
|
656
|
+
additionalProperties: false,
|
|
657
|
+
},
|
|
658
|
+
},
|
|
659
|
+
{
|
|
660
|
+
name: "volume_scanner",
|
|
661
|
+
description: "Scan ASX tickers for unusual volume and price anomalies. " +
|
|
662
|
+
"Correlates anomalies with known report dates for investigation.",
|
|
663
|
+
parameters: {
|
|
664
|
+
type: "object",
|
|
665
|
+
properties: {
|
|
666
|
+
tickers: {
|
|
667
|
+
type: "string",
|
|
668
|
+
description: "Comma-separated ASX tickers (e.g. BHP,RIO,WES).",
|
|
669
|
+
},
|
|
670
|
+
watchlist: {
|
|
671
|
+
type: "string",
|
|
672
|
+
description: "Path to a watchlist.txt file (one ticker per line).",
|
|
673
|
+
},
|
|
674
|
+
days: {
|
|
675
|
+
type: "integer",
|
|
676
|
+
description: "Lookback window in calendar days (default: 30).",
|
|
677
|
+
},
|
|
678
|
+
threshold: {
|
|
679
|
+
type: "number",
|
|
680
|
+
description: "Volume multiplier vs 20-day average to flag anomalies (default: 2.0).",
|
|
681
|
+
},
|
|
682
|
+
format: {
|
|
683
|
+
type: "string",
|
|
684
|
+
enum: ["csv", "json"],
|
|
685
|
+
description: "Output format (default: csv).",
|
|
686
|
+
},
|
|
687
|
+
report_dates: {
|
|
688
|
+
type: "string",
|
|
689
|
+
description: "Path to JSON file with report dates for correlation.",
|
|
690
|
+
},
|
|
691
|
+
test: {
|
|
692
|
+
type: "boolean",
|
|
693
|
+
description: "Run with synthetic sample data (no network required).",
|
|
694
|
+
},
|
|
695
|
+
},
|
|
696
|
+
required: [],
|
|
697
|
+
additionalProperties: false,
|
|
698
|
+
},
|
|
699
|
+
},
|
|
700
|
+
{
|
|
701
|
+
name: "asx_calendar_fetcher",
|
|
702
|
+
description: "Fetch scheduled ASX Appendix 4C (quarterly cashflow) due dates " +
|
|
703
|
+
"from official calendars. Returns ticker, company, quarter_end, " +
|
|
704
|
+
"due_date, lodged_date, and status (upcoming/lodged/overdue).",
|
|
705
|
+
parameters: {
|
|
706
|
+
type: "object",
|
|
707
|
+
properties: {
|
|
708
|
+
tickers: {
|
|
709
|
+
type: "string",
|
|
710
|
+
description: "Comma-separated ASX tickers or 'all'.",
|
|
711
|
+
},
|
|
712
|
+
period: {
|
|
713
|
+
type: "string",
|
|
714
|
+
description: "Period filter: 'next_30d', 'next_90d', 'Q1_2026', etc.",
|
|
715
|
+
},
|
|
716
|
+
format: {
|
|
717
|
+
type: "string",
|
|
718
|
+
enum: ["json", "csv"],
|
|
719
|
+
description: "Output format (default: json).",
|
|
720
|
+
},
|
|
721
|
+
test: {
|
|
722
|
+
type: "boolean",
|
|
723
|
+
description: "Run with synthetic data (no network).",
|
|
724
|
+
},
|
|
725
|
+
},
|
|
726
|
+
required: ["tickers", "period"],
|
|
727
|
+
additionalProperties: false,
|
|
728
|
+
},
|
|
729
|
+
},
|
|
730
|
+
{
|
|
731
|
+
name: "bulk_asx_announcements",
|
|
732
|
+
description: "Bulk fetch recent ASX announcements (Appendix 3Y director trades, " +
|
|
733
|
+
"4C cashflow reports, 3B issue of securities) for watchlist tickers. " +
|
|
734
|
+
"Auto-paginates and filters. Chains with asx_parser for mass processing.",
|
|
735
|
+
parameters: {
|
|
736
|
+
type: "object",
|
|
737
|
+
properties: {
|
|
738
|
+
tickers: {
|
|
739
|
+
type: "string",
|
|
740
|
+
description: "Comma-separated tickers or path to watchlist.txt.",
|
|
741
|
+
},
|
|
742
|
+
types: {
|
|
743
|
+
type: "array",
|
|
744
|
+
items: { type: "string", enum: ["3Y", "4C", "3B"] },
|
|
745
|
+
description: "Announcement types to fetch.",
|
|
746
|
+
},
|
|
747
|
+
days_back: {
|
|
748
|
+
type: "integer",
|
|
749
|
+
description: "Lookback days (default: 90).",
|
|
750
|
+
},
|
|
751
|
+
output_dir: {
|
|
752
|
+
type: "string",
|
|
753
|
+
description: "Directory to save downloaded announcement files.",
|
|
754
|
+
},
|
|
755
|
+
test: {
|
|
756
|
+
type: "boolean",
|
|
757
|
+
description: "Run with synthetic data (no network).",
|
|
758
|
+
},
|
|
759
|
+
},
|
|
760
|
+
required: ["tickers"],
|
|
761
|
+
additionalProperties: false,
|
|
762
|
+
},
|
|
763
|
+
},
|
|
764
|
+
{
|
|
765
|
+
name: "asic_officer_lookup",
|
|
766
|
+
description: "Query ASIC/ABR for company officers and directors by ABN or ASX ticker. " +
|
|
767
|
+
"Returns officer names, roles, and appointment dates. Useful for detecting " +
|
|
768
|
+
"politician-director overlaps and conflicts of interest.",
|
|
769
|
+
parameters: {
|
|
770
|
+
type: "object",
|
|
771
|
+
properties: {
|
|
772
|
+
abn_or_ticker: {
|
|
773
|
+
type: "string",
|
|
774
|
+
description: "ABN number or ASX ticker symbol to look up.",
|
|
775
|
+
},
|
|
776
|
+
max_results: {
|
|
777
|
+
type: "integer",
|
|
778
|
+
description: "Maximum officers to return (default: 20).",
|
|
779
|
+
},
|
|
780
|
+
format: {
|
|
781
|
+
type: "string",
|
|
782
|
+
enum: ["json"],
|
|
783
|
+
description: "Output format (default: json).",
|
|
784
|
+
},
|
|
785
|
+
test: {
|
|
786
|
+
type: "boolean",
|
|
787
|
+
description: "Run with synthetic data (no network).",
|
|
788
|
+
},
|
|
789
|
+
},
|
|
790
|
+
required: ["abn_or_ticker"],
|
|
791
|
+
additionalProperties: false,
|
|
792
|
+
},
|
|
793
|
+
},
|
|
794
|
+
{
|
|
795
|
+
name: "multi_forum_scraper",
|
|
796
|
+
description: "Scrape multiple forums and social media (HotCopper, Stockhead, Twitter/Nitter) " +
|
|
797
|
+
"for ASX ticker sentiment, rumors, and discussion. Extends hotcopper_scraper " +
|
|
798
|
+
"with multi-source coverage and keyword filtering.",
|
|
799
|
+
parameters: {
|
|
800
|
+
type: "object",
|
|
801
|
+
properties: {
|
|
802
|
+
ticker: {
|
|
803
|
+
type: "string",
|
|
804
|
+
description: "ASX ticker symbol (e.g. BHP, CBA, RIO).",
|
|
805
|
+
},
|
|
806
|
+
sites: {
|
|
807
|
+
type: "array",
|
|
808
|
+
items: {
|
|
809
|
+
type: "string",
|
|
810
|
+
enum: ["hotcopper", "stockhead", "twitter"],
|
|
811
|
+
},
|
|
812
|
+
description: "Sites to scrape (default: all).",
|
|
813
|
+
},
|
|
814
|
+
days: {
|
|
815
|
+
type: "integer",
|
|
816
|
+
description: "Look-back period in days (default: 7).",
|
|
817
|
+
},
|
|
818
|
+
keywords: {
|
|
819
|
+
type: "array",
|
|
820
|
+
items: { type: "string" },
|
|
821
|
+
description: "Extra keywords to search for beyond built-in rumor/sentiment terms.",
|
|
822
|
+
},
|
|
823
|
+
test: {
|
|
824
|
+
type: "boolean",
|
|
825
|
+
description: "Run with synthetic data (no network).",
|
|
826
|
+
},
|
|
827
|
+
},
|
|
828
|
+
required: ["ticker"],
|
|
829
|
+
additionalProperties: false,
|
|
830
|
+
},
|
|
831
|
+
},
|
|
832
|
+
{
|
|
833
|
+
name: "insider_suspicion_scorer",
|
|
834
|
+
description: "Aggregate scorer combining trades, volume/price anomalies, forum rumors, " +
|
|
835
|
+
"and politician holdings into composite suspicion scores (0-100). " +
|
|
836
|
+
"Enables end-to-end insider-trading signal detection.",
|
|
837
|
+
parameters: {
|
|
838
|
+
type: "object",
|
|
839
|
+
properties: {
|
|
840
|
+
trades: {
|
|
841
|
+
type: "string",
|
|
842
|
+
description: "Path to trades.json.",
|
|
843
|
+
},
|
|
844
|
+
anomalies: {
|
|
845
|
+
type: "string",
|
|
846
|
+
description: "Path to anomalies.json.",
|
|
847
|
+
},
|
|
848
|
+
rumors: {
|
|
849
|
+
type: "string",
|
|
850
|
+
description: "Path to rumors.json.",
|
|
851
|
+
},
|
|
852
|
+
holdings: {
|
|
853
|
+
type: "string",
|
|
854
|
+
description: "Path to holdings.json.",
|
|
855
|
+
},
|
|
856
|
+
output: {
|
|
857
|
+
type: "string",
|
|
858
|
+
description: "Output file path (default: stdout).",
|
|
859
|
+
},
|
|
860
|
+
min_score: {
|
|
861
|
+
type: "number",
|
|
862
|
+
description: "Minimum suspicion score to include (default: 0).",
|
|
863
|
+
},
|
|
864
|
+
test: {
|
|
865
|
+
type: "boolean",
|
|
866
|
+
description: "Run with synthetic interlocking test data.",
|
|
867
|
+
},
|
|
868
|
+
},
|
|
869
|
+
required: [],
|
|
870
|
+
additionalProperties: false,
|
|
871
|
+
},
|
|
872
|
+
},
|
|
873
|
+
];
|
|
874
|
+
// ---------------------------------------------------------------------------
// Internal sets
// ---------------------------------------------------------------------------
/** Tools that expose stored artifacts to the model. */
const _ARTIFACT_TOOLS = new Set(["list_artifacts", "read_artifact"]);
/** Delegation tools (subtask/execute) plus artifact access — hidden from flat executors. */
const _DELEGATION_TOOLS = new Set(["subtask", "execute", ..._ARTIFACT_TOOLS]);
|
|
887
|
+
// ---------------------------------------------------------------------------
|
|
888
|
+
// Helpers
|
|
889
|
+
// ---------------------------------------------------------------------------
|
|
890
|
+
/**
 * Remove the `acceptance_criteria` property from the subtask/execute
 * parameter schemas.
 *
 * Unaffected definitions are returned as-is (same object reference);
 * affected ones get a shallow copy whose `parameters` is a deep clone
 * with `acceptance_criteria` removed from `properties` and `required`.
 */
function _stripAcceptanceCriteria(defs) {
    const targets = new Set(["subtask", "execute"]);
    return defs.map((def) => {
        if (!targets.has(def.name)) {
            return def;
        }
        const schema = structuredClone(def.parameters);
        if (schema.properties) {
            delete schema.properties["acceptance_criteria"];
        }
        if (schema.required) {
            schema.required = schema.required.filter((name) => name !== "acceptance_criteria");
        }
        return { ...def, parameters: schema };
    });
}
|
|
906
|
+
/**
 * Return tool definitions based on mode.
 *
 * - `includeSubtask=true` (normal recursive) → everything except execute, artifact tools.
 * - `includeSubtask=false` (flat / executor) → no subtask, no execute, no artifact tools.
 * - `includeArtifacts=true` → add list_artifacts + read_artifact.
 * - `includeAcceptanceCriteria=false` → strip acceptance_criteria from schemas.
 */
export function get_tool_definitions(options) {
    const includeSubtask = options?.includeSubtask ?? true;
    const includeArtifacts = options?.includeArtifacts ?? false;
    const includeAcceptanceCriteria = options?.includeAcceptanceCriteria ?? false;
    // Recursive mode hides execute + artifact tools; flat/executor mode
    // hides every delegation tool (subtask, execute, artifact access).
    const isHidden = includeSubtask
        ? (d) => d.name === "execute" || _ARTIFACT_TOOLS.has(d.name)
        : (d) => _DELEGATION_TOOLS.has(d.name);
    let defs = TOOL_DEFINITIONS.filter((d) => !isHidden(d));
    if (includeArtifacts) {
        defs = [...defs, ...TOOL_DEFINITIONS.filter((d) => _ARTIFACT_TOOLS.has(d.name))];
    }
    return includeAcceptanceCriteria ? defs : _stripAcceptanceCriteria(defs);
}
|
|
933
|
+
/**
 * Recursively enforce OpenAI strict-mode constraints on `schema` in-place.
 *
 * Every `type: "object"` node gets `additionalProperties: false` and a
 * complete `required` list. Optional properties (not originally required)
 * are made nullable via `anyOf: [{ type: original }, { type: "null" }]`.
 */
function _strictFixup(schema) {
    const schemaType = schema["type"];
    if (schemaType === "object") {
        const properties = (schema["properties"] ?? {});
        const required = new Set(schema["required"] ?? []);
        const allKeys = Object.keys(properties);
        for (const key of allKeys) {
            const prop = properties[key];
            // Boolean schemas (`true`/`false`) are valid JSON Schema property
            // values; the `in` operator throws a TypeError on primitives, so
            // skip everything that is not a plain object schema.
            if (prop == null || typeof prop !== "object") {
                continue;
            }
            _strictFixup(prop);
            if (!required.has(key) && "type" in prop) {
                // Optional property: strict mode forces it into `required`,
                // so make it nullable by wrapping the original type.
                const originalType = prop["type"];
                delete prop["type"];
                const desc = prop["description"];
                delete prop["description"];
                const newProp = {
                    anyOf: [{ type: originalType }, { type: "null" }],
                };
                if (desc !== undefined) {
                    newProp["description"] = desc;
                }
                // Move remaining keys (enum, items, …) into first anyOf branch
                for (const [k, v] of Object.entries(prop)) {
                    if (!(k in newProp)) {
                        newProp["anyOf"][0][k] = v;
                    }
                }
                properties[key] = newProp;
            }
        }
        // Strict mode: list every property as required, forbid extras.
        schema["required"] = allKeys;
        schema["additionalProperties"] = false;
    }
    else if (schemaType === "array") {
        const items = schema["items"];
        if (items != null && typeof items === "object") {
            _strictFixup(items);
        }
    }
}
|
|
983
|
+
/**
 * For OpenAI strict mode: all properties must be required.
 *
 * Optional properties (not in 'required') are made nullable by wrapping
 * their type with `anyOf [original, null]`. Recurses into nested objects
 * and array items so that every `type: object` node has
 * `additionalProperties: false` and a complete `required` list.
 */
function _makeStrictParameters(params) {
    // Clone first so the caller's schema is never mutated.
    const strictSchema = structuredClone(params);
    _strictFixup(strictSchema);
    return strictSchema;
}
|
|
996
|
+
// ---------------------------------------------------------------------------
|
|
997
|
+
// Provider converters
|
|
998
|
+
// ---------------------------------------------------------------------------
|
|
999
|
+
/** Convert provider-neutral definitions to OpenAI tools array format. */
export function to_openai_tools(defs, strict = true) {
    const definitions = defs ?? TOOL_DEFINITIONS;
    return definitions.map((def) => {
        const func = {
            name: def.name,
            description: def.description,
            // Strict mode requires schema fix-ups; otherwise pass through as-is.
            parameters: strict ? _makeStrictParameters(def.parameters) : def.parameters,
        };
        if (strict) {
            func.strict = true;
        }
        return { type: "function", function: func };
    });
}
|
|
1020
|
+
/** Convert provider-neutral definitions to Anthropic tools array format. */
export function to_anthropic_tools(defs) {
    const definitions = defs ?? TOOL_DEFINITIONS;
    const tools = [];
    for (const { name, description, parameters } of definitions) {
        // Anthropic uses `input_schema` where OpenAI uses `parameters`.
        tools.push({ name, description, input_schema: parameters });
    }
    return tools;
}
|
|
1029
|
+
//# sourceMappingURL=tool-defs.js.map
|