@ifc-lite/viewer 1.19.0 → 1.19.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +15 -14
- package/.turbo/turbo-typecheck.log +1 -1
- package/CHANGELOG.md +8 -0
- package/dist/assets/basketViewActivator-CA2CTcVo.js +71 -0
- package/dist/assets/{bcf-DOG9_WPX.js → bcf-4K724hw0.js} +18 -18
- package/dist/assets/{exporters-BraHBeoi.js → exporters-xbXqEDlO.js} +53 -46
- package/dist/assets/ids-2WdONLlu.js +2033 -0
- package/dist/assets/index-BXeEKqJG.css +1 -0
- package/dist/assets/{index-BOi3BuUI.js → index-D8Epw-e7.js} +48072 -30928
- package/dist/assets/{native-bridge-CpBeOPQa.js → native-bridge-DKmx1z95.js} +2 -2
- package/dist/assets/{sandbox-Baez7n-t.js → sandbox-tccwm5Bo.js} +547 -529
- package/dist/assets/{server-client-BB6cMAXE.js → server-client-LoWPK1N2.js} +1 -1
- package/dist/assets/three-CDRZThFA.js +4057 -0
- package/dist/assets/{wasm-bridge-CAYCUHbE.js → wasm-bridge-BsJGgPMs.js} +1 -1
- package/dist/index.html +8 -7
- package/dist/samples/building-architecture.ifc +453 -0
- package/dist/samples/hello-wall.ifc +1054 -0
- package/dist/samples/infra-bridge.ifc +962 -0
- package/package.json +7 -2
- package/public/samples/building-architecture.ifc +453 -0
- package/public/samples/hello-wall.ifc +1054 -0
- package/public/samples/infra-bridge.ifc +962 -0
- package/src/App.tsx +37 -3
- package/src/components/mcp/HeroScene.tsx +876 -0
- package/src/components/mcp/McpLanding.tsx +1318 -0
- package/src/components/mcp/McpPlayground.tsx +524 -0
- package/src/components/mcp/PlaygroundChat.tsx +1097 -0
- package/src/components/mcp/PlaygroundViewer.tsx +815 -0
- package/src/components/mcp/README.md +171 -0
- package/src/components/mcp/data.ts +659 -0
- package/src/components/mcp/playground-dispatcher.ts +1649 -0
- package/src/components/mcp/playground-files.ts +107 -0
- package/src/components/mcp/playground-uploads.ts +122 -0
- package/src/components/mcp/types.ts +65 -0
- package/src/components/mcp/use-mcp-page.ts +109 -0
- package/src/components/viewer/MainToolbar.tsx +19 -0
- package/src/components/viewer/ViewportContainer.tsx +35 -4
- package/src/generated/mcp-catalog.json +82 -0
- package/vite.config.ts +6 -0
- package/dist/assets/basketViewActivator-RZy5c3Td.js +0 -1
- package/dist/assets/ids-DQ5jY0E8.js +0 -1
- package/dist/assets/index-0XpVr_S5.css +0 -1
|
@@ -0,0 +1,1649 @@
|
|
|
1
|
+
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
2
|
+
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
3
|
+
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* playground-dispatcher.ts — client-side execution surface for the MCP
|
|
7
|
+
* tool catalogue.
|
|
8
|
+
*
|
|
9
|
+
* The web playground talks to Anthropic with the same tool definitions the
|
|
10
|
+
* stdio MCP server advertises. When Claude emits a `tool_use` block we run
|
|
11
|
+
* it through `dispatch()` here, which:
|
|
12
|
+
*
|
|
13
|
+
* 1. Resolves the tool name to an SDK call against the loaded model's
|
|
14
|
+
* `BimContext` (built on top of `HeadlessLikeBackend`, the same
|
|
15
|
+
* backend the Node MCP server uses — so behaviour matches).
|
|
16
|
+
* 2. Catches `ToolExecutionError`s and converts them to MCP-shaped
|
|
17
|
+
* `tool_result` payloads with `is_error: true` + a stable error code.
|
|
18
|
+
* 3. Emits a structured payload that is small enough to round-trip back
|
|
19
|
+
* into Claude’s context without blowing the message budget.
|
|
20
|
+
*
|
|
21
|
+
* Coverage: read + mutate + BCF + IDS + export are all wired. Disk I/O
|
|
22
|
+
* (model_save, export_*) stages a Blob in `playgroundFiles` that the user
|
|
23
|
+
* downloads on click — never auto-triggered. The handful of tools that
|
|
24
|
+
* genuinely don't fit a browser (model_load federated, export_glb /
|
|
25
|
+
* export_ifcx / export_pdf_report) return a friendly
|
|
26
|
+
* UNSUPPORTED_OPERATION so the agent can route the user to the stdio MCP
|
|
27
|
+
* for those.
|
|
28
|
+
*/
|
|
29
|
+
|
|
30
|
+
import { IfcParser, type IfcDataStore, extractLengthUnitScale } from '@ifc-lite/parser';
|
|
31
|
+
import {
|
|
32
|
+
BsddNamespace,
|
|
33
|
+
createBimContext,
|
|
34
|
+
type BimContext,
|
|
35
|
+
type EntityRef,
|
|
36
|
+
} from '@ifc-lite/sdk';
|
|
37
|
+
import { EntityNode } from '@ifc-lite/query';
|
|
38
|
+
import {
|
|
39
|
+
HeadlessLikeBackend,
|
|
40
|
+
ToolErrorCode,
|
|
41
|
+
ToolExecutionError,
|
|
42
|
+
} from '@ifc-lite/mcp/browser';
|
|
43
|
+
import {
|
|
44
|
+
addCommentToTopic,
|
|
45
|
+
addTopicToProject,
|
|
46
|
+
addViewpointToTopic,
|
|
47
|
+
createBCFComment,
|
|
48
|
+
createBCFProject,
|
|
49
|
+
createBCFTopic,
|
|
50
|
+
updateTopicStatus,
|
|
51
|
+
writeBCF,
|
|
52
|
+
type BCFProject,
|
|
53
|
+
type BCFTopic,
|
|
54
|
+
} from '@ifc-lite/bcf';
|
|
55
|
+
import { parseIDS, validateIDS, type IDSDocument } from '@ifc-lite/ids';
|
|
56
|
+
import { CATALOG, paramsFor } from './data';
|
|
57
|
+
import type { CatalogTool } from './types';
|
|
58
|
+
import type { ViewerController, ColorTuple } from './PlaygroundViewer';
|
|
59
|
+
import { playgroundFiles } from './playground-files';
|
|
60
|
+
import { playgroundUploads } from './playground-uploads';
|
|
61
|
+
|
|
62
|
+
// ── loaded-model handle ────────────────────────────────────────────────────
|
|
63
|
+
|
|
64
|
+
export interface LoadedPlaygroundModel {
  /** Stable slug derived from the filename by parsePlaygroundModel. */
  id: string;
  /** Original upload filename (kept verbatim, extension included). */
  name: string;
  /** Size of the uploaded IFC payload in bytes. */
  fileSize: number;
  /** Raw bytes — kept around so the geometry processor can re-parse on
   * demand. `store.source` would work too but only for stores parsed by
   * this exact path; keeping our own copy is cheaper than hunting it. */
  bytes: Uint8Array;
  /** Columnar parse result from IfcParser.parseColumnar. */
  store: IfcDataStore;
  /** SDK facade over `store`; every tool implementation goes through this. */
  bim: BimContext;
}
|
|
75
|
+
|
|
76
|
+
/** Parse an IFC ArrayBuffer in the browser using the same path the
|
|
77
|
+
* stdio CLI uses (just `IfcParser.parseColumnar`). */
|
|
78
|
+
export async function parsePlaygroundModel(
|
|
79
|
+
buffer: ArrayBuffer,
|
|
80
|
+
filename: string,
|
|
81
|
+
): Promise<LoadedPlaygroundModel> {
|
|
82
|
+
// Snapshot the buffer up-front. parseColumnar may keep references into
|
|
83
|
+
// it but the geometry processor wants a fresh, owning Uint8Array.
|
|
84
|
+
const bytes = new Uint8Array(buffer.slice(0));
|
|
85
|
+
const parser = new IfcParser();
|
|
86
|
+
const store = await parser.parseColumnar(buffer);
|
|
87
|
+
store.fileSize = buffer.byteLength;
|
|
88
|
+
const id = filename.replace(/\.ifc$/i, '').replace(/[^a-zA-Z0-9]+/g, '-').toLowerCase() || 'model';
|
|
89
|
+
const backend = new HeadlessLikeBackend(store, filename, id);
|
|
90
|
+
const bim = createBimContext({ backend });
|
|
91
|
+
return { id, name: filename, fileSize: buffer.byteLength, bytes, store, bim };
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// ── tool execution ────────────────────────────────────────────────────────
|
|
95
|
+
|
|
96
|
+
/**
|
|
97
|
+
* Outcome of dispatching a single tool call. Mirrors the relevant fields
|
|
98
|
+
* of an MCP `CallToolResult` so the chat view can render success and
|
|
99
|
+
* failure with the same components.
|
|
100
|
+
*
|
|
101
|
+
* When a tool produces a downloadable artifact (bcf_export, model_save,
|
|
102
|
+
* export_ifc / csv / json, ids_validate) it sets `download` so the chat
|
|
103
|
+
* panel can surface an inline "Get .bcf" / "Save IFC" button under the
|
|
104
|
+
* tool call card. The actual file lives in `playgroundFiles` (also
|
|
105
|
+
* mirrored in the sidebar Downloads panel); `download.fileId` is the
|
|
106
|
+
* handle the chat uses to trigger the click.
|
|
107
|
+
*
|
|
108
|
+
* Strict rule: download is OPT-IN per click. Tools NEVER auto-trigger.
|
|
109
|
+
*/
|
|
110
|
+
export interface ToolDispatchResult {
  /** Human-readable summary rendered in the chat transcript. */
  text: string;
  /** Machine-readable payload round-tripped into Claude's context. */
  structured: unknown;
  /** True when this mirrors an MCP `tool_result` with `is_error: true`. */
  isError: boolean;
  /** Stable machine error code — set only when isError is true. */
  errorCode?: string;
  /** Optional remediation hint surfaced alongside an error. */
  hint?: string;
  /** Present when the tool staged a downloadable artifact in playgroundFiles. */
  download?: {
    /** Handle into playgroundFiles used to trigger the download click. */
    fileId: string;
    filename: string;
    mimeType: string;
    /** Size of the staged blob in bytes. */
    size: number;
    /** Short label for the download button, e.g. "Get .bcf", "Save IFC". */
    label: string;
  };
}
|
|
125
|
+
|
|
126
|
+
/** Optional context surfaces the dispatcher can use beyond the model. */
|
|
127
|
+
export interface DispatchContext {
  /** Inline 3D viewer controller. When absent, viewer_* tools fail with
   * UNSUPPORTED_OPERATION and ask the user to open the viewer panel. */
  viewer?: ViewerController | null;
  /** Open the inline viewer panel if it's collapsed. The viewer_open tool
   * forwards here so the agent can request it. */
  openViewerPanel?: () => void;
  /** Optional federated models (model_id → model). When omitted only the
   * primary `model` argument to dispatch() is reachable; diff tools that
   * need two models use `model_id` to look the second one up. */
  registry?: Map<string, LoadedPlaygroundModel>;
}
|
|
139
|
+
|
|
140
|
+
// ── BCF session state ─────────────────────────────────────────────────────
|
|
141
|
+
// One BCF project per playground tab — bcf_topic_create accumulates topics,
|
|
142
|
+
// bcf_export bundles the lot. Lives at module scope so tools can mutate it
|
|
143
|
+
// across calls without threading it through every dispatch invocation.
|
|
144
|
+
let bcfProject: BCFProject | null = null;
|
|
145
|
+
function getBcfProject(): BCFProject {
|
|
146
|
+
if (!bcfProject) bcfProject = createBCFProject({ name: 'ifc-lite-playground', version: '2.1' });
|
|
147
|
+
return bcfProject;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
/**
|
|
151
|
+
* Auto-stage a fresh `.bcfzip` blob in playgroundFiles after every BCF
|
|
152
|
+
* mutation. This way the chat shows a `Get .bcfzip` button on EVERY BCF
|
|
153
|
+
* call — the user doesn't have to wait for the agent to remember to
|
|
154
|
+
* call bcf_export. We re-use the same fileId across calls so the
|
|
155
|
+
* sidebar Downloads panel shows ONE always-current entry instead of
|
|
156
|
+
* a long history of stale bundles.
|
|
157
|
+
*
|
|
158
|
+
* Returns the download metadata to splice into a tool result, or null
|
|
159
|
+
* when the project has no topics yet (nothing to download).
|
|
160
|
+
*/
|
|
161
|
+
let stagedBcfFileId: string | null = null;
|
|
162
|
+
async function autoStageBcfDownload(): Promise<NonNullable<ToolDispatchResult['download']> | null> {
|
|
163
|
+
const project = getBcfProject();
|
|
164
|
+
if (project.topics.size === 0) return null;
|
|
165
|
+
const blob = await writeBCF(project);
|
|
166
|
+
// Drop the previous staged copy so the panel only ever shows the latest.
|
|
167
|
+
if (stagedBcfFileId) playgroundFiles.remove(stagedBcfFileId);
|
|
168
|
+
const filename = coerceFilename(undefined, 'bcfzip', 'issues');
|
|
169
|
+
const file = playgroundFiles.add({
|
|
170
|
+
filename,
|
|
171
|
+
mimeType: 'application/zip',
|
|
172
|
+
size: blob.size,
|
|
173
|
+
blob,
|
|
174
|
+
source: 'bcf (auto-staged)',
|
|
175
|
+
description: `${project.topics.size} topic(s) · auto-updates as you edit`,
|
|
176
|
+
});
|
|
177
|
+
stagedBcfFileId = file.id;
|
|
178
|
+
return {
|
|
179
|
+
fileId: file.id,
|
|
180
|
+
filename,
|
|
181
|
+
mimeType: 'application/zip',
|
|
182
|
+
size: blob.size,
|
|
183
|
+
label: 'Get .bcfzip',
|
|
184
|
+
};
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
/**
|
|
188
|
+
* Read-only tool implementations the v1 playground supports. Each entry
|
|
189
|
+
* returns a (text, structured) pair the chat panel can render directly.
|
|
190
|
+
* Anything not in this map → UNSUPPORTED_OPERATION.
|
|
191
|
+
*/
|
|
192
|
+
type ToolImplResult = {
  /** Plain-text summary for the chat transcript. */
  text: string;
  /** JSON payload mirrored into the MCP-shaped tool_result. */
  structured: unknown;
  /** Optional staged-download metadata (see ToolDispatchResult.download). */
  download?: ToolDispatchResult['download'];
};
/** One tool implementation: (loaded model, raw tool args, ambient context) → result. */
type ToolImpl = (model: LoadedPlaygroundModel, args: Record<string, unknown>, ctx: DispatchContext) => Promise<ToolImplResult>;
|
|
198
|
+
|
|
199
|
+
function requireViewer(ctx: DispatchContext): ViewerController {
|
|
200
|
+
if (!ctx.viewer || !ctx.viewer.isLoaded()) {
|
|
201
|
+
throw new ToolExecutionError({
|
|
202
|
+
code: ToolErrorCode.UNSUPPORTED_OPERATION,
|
|
203
|
+
message: 'Viewer is not open. Call viewer_open first to mount the inline 3D panel.',
|
|
204
|
+
hint: 'Click the "show 3D viewer" button or have the agent call viewer_open.',
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
return ctx.viewer;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
function parseColorArg(input: unknown): ColorTuple {
|
|
211
|
+
// Accept hex strings, named colors, or [r,g,b] / [r,g,b,a] arrays in 0-1.
|
|
212
|
+
if (Array.isArray(input)) {
|
|
213
|
+
const arr = input.map(Number);
|
|
214
|
+
if (arr.length === 3) return [arr[0], arr[1], arr[2], 1];
|
|
215
|
+
if (arr.length === 4) return [arr[0], arr[1], arr[2], arr[3]];
|
|
216
|
+
}
|
|
217
|
+
if (typeof input === 'string') {
|
|
218
|
+
const hex = input.startsWith('#') ? input.slice(1) : input;
|
|
219
|
+
if (/^[0-9a-fA-F]{6}$/.test(hex)) {
|
|
220
|
+
return [parseInt(hex.slice(0, 2), 16) / 255, parseInt(hex.slice(2, 4), 16) / 255, parseInt(hex.slice(4, 6), 16) / 255, 1];
|
|
221
|
+
}
|
|
222
|
+
const named: Record<string, ColorTuple> = {
|
|
223
|
+
red: [1, 0.2, 0.2, 1], orange: [1, 0.6, 0.1, 1], yellow: [1, 0.9, 0.1, 1],
|
|
224
|
+
green: [0.2, 0.8, 0.2, 1], blue: [0.2, 0.4, 1, 1], purple: [0.6, 0.2, 0.8, 1],
|
|
225
|
+
pink: [1, 0.4, 0.8, 1], teal: [0.45, 0.85, 0.79, 1], gray: [0.5, 0.5, 0.5, 1],
|
|
226
|
+
white: [1, 1, 1, 1], black: [0, 0, 0, 1],
|
|
227
|
+
chartreuse: [0.84, 1.0, 0.25, 1], magenta: [1.0, 0.36, 0.86, 1],
|
|
228
|
+
};
|
|
229
|
+
if (named[input.toLowerCase()]) return named[input.toLowerCase()];
|
|
230
|
+
}
|
|
231
|
+
// Default chartreuse
|
|
232
|
+
return [0.84, 1.0, 0.25, 1];
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
function formatColorTuple(c: ColorTuple): string {
|
|
236
|
+
const r = Math.round(c[0] * 255).toString(16).padStart(2, '0');
|
|
237
|
+
const g = Math.round(c[1] * 255).toString(16).padStart(2, '0');
|
|
238
|
+
const b = Math.round(c[2] * 255).toString(16).padStart(2, '0');
|
|
239
|
+
return `#${r}${g}${b}`;
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
/**
|
|
243
|
+
* Browser-safe bSDD client routed through this site's `/api/bsdd/*` proxy.
|
|
244
|
+
* The default BsddNamespace hits `api.bsdd.buildingsmart.org` directly,
|
|
245
|
+
* which fails CORS in browsers. Vite (dev) and Vercel (prod) both rewrite
|
|
246
|
+
* `/api/bsdd/*` to that host already, so we share the SDK's namespace
|
|
247
|
+
* implementation but swap the base URL.
|
|
248
|
+
*/
|
|
249
|
+
const PROXIED_BSDD = new BsddNamespace({ apiBase: '/api/bsdd' });
|
|
250
|
+
|
|
251
|
+
const IMPLS: Record<string, ToolImpl> = {
|
|
252
|
+
// ── Discovery ───────────────────────────────────────────────────────────
|
|
253
|
+
async model_info(m) {
  // One-shot model summary: schema, entity count, file size, top-20 type counts.
  // entityIndex.byType keys are raw STEP storage names (IFCWALL, …) —
  // user-facing surfaces use IFC EXPRESS PascalCase (IfcWall). Resolve
  // through store.entities.getTypeName so the playground agrees with
  // the rest of the MCP surface.
  const counts: Record<string, number> = {};
  for (const [storageType, ids] of m.store.entityIndex.byType) {
    // getTypeName needs a concrete entity id; fall back to the raw storage
    // key for (theoretically) empty buckets.
    // NOTE(review): if two storage types ever map to the same pretty name,
    // the later bucket overwrites rather than sums — confirm that can't occur.
    const pretty = (ids.length > 0 ? m.store.entities.getTypeName(ids[0]) : null) ?? storageType;
    counts[pretty] = ids.length;
  }
  // Most-populous types first, capped at 20 to stay inside the chat budget.
  const top = Object.entries(counts)
    .sort((a, b) => b[1] - a[1])
    .slice(0, 20)
    .map(([type, count]) => ({ type, count }));
  const summary = `Model '${m.name}' (${m.store.schemaVersion}): ${m.store.entityCount.toLocaleString()} entities, ${formatBytes(m.fileSize)}`;
  return {
    text: summary,
    structured: {
      id: m.id,
      name: m.name,
      schema: m.store.schemaVersion,
      entityCount: m.store.entityCount,
      fileSize: m.fileSize,
      typeCountsTop20: top,
    },
  };
},
|
|
280
|
+
|
|
281
|
+
async model_list(m) {
  // The v1 playground holds exactly one model, so this listing is fixed-size.
  const entry = {
    id: m.id,
    name: m.name,
    entityCount: m.store.entityCount,
    schema: m.store.schemaVersion,
  };
  return {
    text: `1 model loaded: ${m.name} (${m.store.entityCount.toLocaleString()} entities).`,
    structured: { models: [entry] },
  };
},
|
|
287
|
+
|
|
288
|
+
async schema_describe(_m, args) {
  // Stub: full schema reflection needs the server-side introspection map,
  // so this only validates input and points the agent at the docs.
  const type = String(args.type ?? '');
  if (type.length === 0) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: '`type` is required.' });
  }
  return {
    text: `Schema description for ${type} is best read from /mcp#schema_describe — the full reflection table requires the @ifc-lite/data introspection map (server-side only in v1).`,
    structured: { type, note: 'In-browser schema reflection is in v0.3.' },
  };
},
|
|
298
|
+
|
|
299
|
+
// ── Query ───────────────────────────────────────────────────────────────
|
|
300
|
+
async query_entities(m, args) {
  // Paged listing of entities, optionally filtered by IFC type.
  const type = args.type as string | undefined;
  // Sanitize paging args. `Number('abc')` is NaN, and the previous
  // `Math.min(NaN, 200)` / `slice(offset, NaN)` silently produced empty
  // pages; negative values were just as confusing. Clamp both to sane
  // integer bounds (limit capped at 200, as before).
  const rawLimit = Number(args.limit ?? 50);
  const limit = Number.isFinite(rawLimit) ? Math.min(Math.max(Math.trunc(rawLimit), 0), 200) : 50;
  const rawOffset = Number(args.offset ?? 0);
  const offset = Number.isFinite(rawOffset) ? Math.max(Math.trunc(rawOffset), 0) : 0;
  let q = m.bim.query();
  if (type) q = q.byType(type);
  const all = q.toArray();
  const page = all.slice(offset, offset + limit);
  const head = `Found ${all.length.toLocaleString()} matching entit${all.length === 1 ? 'y' : 'ies'}${page.length < all.length ? ` (showing ${page.length})` : ''}.`;
  // The text view caps at 25 rows; the structured payload carries the page.
  const lines = page.slice(0, 25).map((e) => {
    const name = e.name ? ` '${e.name}'` : '';
    const gid = e.globalId ? ` GlobalId=${e.globalId}` : '';
    return ` • ${e.type ?? '?'} #${e.ref.expressId}${name}${gid}`;
  });
  return {
    text: [head, ...lines].join('\n'),
    structured: {
      count: all.length,
      truncated: page.length < all.length,
      entities: page.map((e) => ({
        expressId: e.ref.expressId,
        modelId: e.ref.modelId,
        globalId: e.globalId,
        name: e.name,
        type: e.type,
        description: e.description,
        objectType: e.objectType,
      })),
    },
  };
},
|
|
331
|
+
|
|
332
|
+
async count_entities(m, args) {
  // Histogram of entities grouped by type (default), storey, or material.
  const groupBy = (args.group_by as string | undefined) ?? 'type';
  const counts = new Map<string, number>();
  const bump = (key: string): void => {
    counts.set(key, (counts.get(key) ?? 0) + 1);
  };
  switch (groupBy) {
    case 'type':
      // Same PascalCase normalization as model_info — keep user-facing
      // type counts aligned with the rest of the surface.
      for (const [storageType, ids] of m.store.entityIndex.byType) {
        const pretty = (ids.length > 0 ? m.store.entities.getTypeName(ids[0]) : null) ?? storageType;
        counts.set(pretty, ids.length);
      }
      break;
    case 'storey':
      for (const e of m.bim.query().toArray()) {
        const storey = new EntityNode(m.store, e.ref.expressId).storey();
        bump(storey?.name ?? '(no storey)');
      }
      break;
    case 'material':
      for (const e of m.bim.query().toArray()) {
        bump(m.bim.materials(e.ref)?.name ?? '(no material)');
      }
      break;
  }
  const groups = [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([key, count]) => ({ key, count }));
  return {
    text: `Counted ${groups.length} group(s) by ${groupBy}.\n${groups.slice(0, 25).map((g) => ` • ${g.key} — ${g.count}`).join('\n')}`,
    structured: { groupBy, groups },
  };
},
|
|
364
|
+
|
|
365
|
+
async get_entity(m, args) {
  // Fetch a single entity (resolved by global_id / express_id) or fail
  // with ENTITY_NOT_FOUND so the agent gets a stable error code.
  const ref = resolveRef(m, args);
  const data = m.bim.entity(ref);
  if (!data) {
    throw new ToolExecutionError({
      code: ToolErrorCode.ENTITY_NOT_FOUND,
      message: `No entity at ${refStr(ref)} in this model.`,
    });
  }
  const label = data.name ?? '(unnamed)';
  return {
    text: `${data.type} '${label}' (#${data.ref.expressId})`,
    structured: data,
  };
},
|
|
379
|
+
|
|
380
|
+
async get_entities_bulk(m, args) {
  // Batch get_entity by GlobalId (capped at 200). Unresolvable ids become
  // null placeholders so the output stays positionally aligned with input.
  const gids = (args.global_ids as string[] | undefined) ?? [];
  const out: unknown[] = gids.slice(0, 200).map((gid) => {
    try {
      return m.bim.entity(resolveRef(m, { global_id: gid }));
    } catch {
      return null;
    }
  });
  return { text: `Resolved ${out.filter(Boolean).length}/${gids.length} entities.`, structured: { entities: out } };
},
|
|
393
|
+
|
|
394
|
+
async spatial_hierarchy(m) {
  // Lightweight tree walk using EntityNode. The IFC spatial graph uses
  // IfcRelAggregates for "decomposes" + IfcRelContainedInSpatialStructure
  // for "contains" — EntityNode exposes both.
  // NOTE(review): no explicit cycle detection — the depth cap below is the
  // only bound. Assumes the aggregation/containment graph is acyclic.
  interface Node { expressId: number; type?: string; name?: string; children: Node[] }
  const projects = m.store.entityIndex.byType.get('IFCPROJECT') ?? [];
  // Recursively build the subtree rooted at `expressId`; children beyond
  // depth 6 are silently truncated to keep the payload chat-sized.
  function build(expressId: number, depth: number): Node {
    const node = new EntityNode(m.store, expressId);
    const out: Node = { expressId, type: node.type, name: node.name, children: [] };
    if (depth > 6) return out; // bound the recursion for the chat budget
    for (const child of node.decomposes()) out.children.push(build(child.expressId, depth + 1));
    for (const child of node.contains()) out.children.push(build(child.expressId, depth + 1));
    return out;
  }
  // A well-formed file has one IfcProject, but tolerate zero or several.
  const root = projects.map((id) => build(id, 0));
  return { text: `Spatial hierarchy for '${m.name}'.`, structured: { tree: root } };
},
|
|
411
|
+
|
|
412
|
+
async containment_chain(m, args) {
  // Walk upward from an entity to its spatial ancestors (element → space /
  // storey → building → site → project), emitting one path entry per hop.
  const ref = resolveRef(m, args);
  const path: Array<{ expressId: number; type?: string; name?: string; globalId?: string }> = [];
  let current: EntityNode | null = new EntityNode(m.store, ref.expressId);
  // Hard cap of 32 hops guards against pathological/degenerate graphs.
  let safety = 32;
  while (current && safety-- > 0) {
    const step: EntityNode = current;
    path.push({ expressId: step.expressId, type: step.type, name: step.name, globalId: step.globalId });
    // Walk up via spatial containment first, then aggregate parent.
    // The path.some() check breaks out of relationship cycles.
    const next: EntityNode | null = step.containedIn() ?? step.decomposedBy();
    if (!next || path.some((p) => p.expressId === next.expressId)) break;
    current = next;
  }
  return { text: `${path.length}-step containment path.`, structured: { path } };
},
|
|
427
|
+
|
|
428
|
+
async relationships(m, args) {
  // Thin pass-through: resolve the target entity, return the SDK's
  // relationship summary verbatim in the structured payload.
  const target = resolveRef(m, args);
  return { text: `Relationships`, structured: m.bim.relationships(target) };
},
|
|
433
|
+
|
|
434
|
+
async properties_unique(m, args) {
  // Distinct-value histogram for one property across every entity of a type.
  const type = String(args.type ?? '');
  const psetName = String(args.pset ?? '');
  const propName = String(args.property ?? '');
  if (!(type && psetName && propName)) {
    throw new ToolExecutionError({
      code: ToolErrorCode.INVALID_INPUT,
      message: 'type, pset and property are all required.',
    });
  }
  const counts = new Map<string, number>();
  let total = 0;
  for (const e of m.bim.query().byType(type).toArray()) {
    total += 1;
    const v = m.bim.property(e.ref, psetName, propName);
    // Absent values are binned under a sentinel so coverage gaps are visible.
    const key = v == null ? '(missing)' : String(v);
    counts.set(key, (counts.get(key) ?? 0) + 1);
  }
  const values = [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([value, count]) => ({ value, count }));
  const head = `${values.length} unique value(s) for ${type}.${psetName}.${propName} across ${total} entit${total === 1 ? 'y' : 'ies'}:`;
  return { text: [head, ...values.slice(0, 30).map((v) => ` • ${v.value} — ${v.count}`)].join('\n'), structured: { values, total } };
},
|
|
456
|
+
|
|
457
|
+
async materials_list(m) {
  // Usage counts per material name across every product in the model.
  const counts = new Map<string, number>();
  for (const e of m.bim.query().toArray()) {
    const mat = m.bim.materials(e.ref);
    if (!mat) continue;
    const key = mat.name ?? '(unnamed)';
    counts.set(key, (counts.get(key) ?? 0) + 1);
  }
  const list = [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([name, count]) => ({ name, count }));
  // (Renamed the inner map parameter — the original shadowed the model `m`.)
  return {
    text: `${list.length} distinct material(s) in use:\n${list.slice(0, 30).map((entry) => ` • ${entry.name} — ${entry.count}`).join('\n')}`,
    structured: { materials: list },
  };
},
|
|
471
|
+
|
|
472
|
+
async classifications_list(m) {
  // Usage counts per classification reference, keyed "system:identification"
  // (falling back to the reference name, then '?', for sparse data).
  const counts = new Map<string, number>();
  for (const e of m.bim.query().toArray()) {
    for (const c of m.bim.classifications(e.ref)) {
      const key = `${c.system ?? '?'}:${c.identification ?? c.name ?? '?'}`;
      counts.set(key, (counts.get(key) ?? 0) + 1);
    }
  }
  const list = [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([key, count]) => ({ key, count }));
  return {
    text: `${list.length} distinct classification reference(s):\n${list.slice(0, 30).map((c) => ` • ${c.key} — ${c.count}`).join('\n')}`,
    structured: { classifications: list },
  };
},
|
|
486
|
+
|
|
487
|
+
async georeferencing(m) {
  // Presence check only: a model counts as georeferenced when it carries
  // at least one IfcMapConversion entity.
  const conversions = m.store.entityIndex.byType.get('IFCMAPCONVERSION') ?? [];
  const georeferenced = conversions.length > 0;
  return {
    // Fixed pluralization: the old message read "2 IfcMapConversion entity".
    text: georeferenced
      ? `${conversions.length} IfcMapConversion entit${conversions.length === 1 ? 'y' : 'ies'} (georeferenced).`
      : 'Model has no IfcMapConversion (no georeferencing).',
    structured: { hasGeoreference: georeferenced },
  };
},
|
|
494
|
+
|
|
495
|
+
async units(m) {
  // Report the length-unit → metre scale factor plus the schema version.
  // When the raw STEP source (or the index) isn't available we fall back
  // to 1.0, i.e. assume lengths are already metres.
  const scale = m.store.source && m.store.entityIndex
    ? extractLengthUnitScale(m.store.source, m.store.entityIndex)
    : 1.0;
  const text = `Length unit scale: ${scale} (lengths × ${scale} → metres). Schema: ${m.store.schemaVersion}.`;
  return { text, structured: { lengthUnitScale: scale, schema: m.store.schemaVersion } };
},
|
|
504
|
+
|
|
505
|
+
// ── Geometry (read from quantity sets) ──────────────────────────────────
|
|
506
|
+
async geometry_bbox(m, args) {
  // v1: surface the quantity sets so the agent can derive extents itself;
  // real mesh-based bounding boxes arrive with the WASM geometry pipeline.
  const qsets = m.bim.quantities(resolveRef(m, args));
  return { text: 'Bounding-box derived from quantity sets when available; full WASM geometry is v0.2.', structured: { quantitySets: qsets } };
},
|
|
511
|
+
async geometry_volume(m, args) {
  // Volume read from quantity sets: report the FIRST quantity whose name
  // contains "Volume". The original `break` only exited the inner loop,
  // so a later quantity set could silently overwrite the first hit —
  // the labeled break stops the whole scan at the first match.
  const ref = resolveRef(m, args);
  const qsets = m.bim.quantities(ref);
  let vol: number | null = null;
  outer:
  for (const q of qsets) {
    for (const x of q.quantities) {
      if (/Volume/i.test(x.name) && typeof x.value === 'number') {
        vol = x.value;
        break outer;
      }
    }
  }
  return { text: vol == null ? 'No Volume quantity present.' : `Volume = ${vol.toFixed(3)} m³.`, structured: { volume: vol } };
},
|
|
518
|
+
async geometry_area(m, args) {
  // Area read from quantity sets: report the FIRST quantity whose name
  // contains "Area". Same fix as geometry_volume — the original inner
  // `break` let a later quantity set overwrite the first match.
  const ref = resolveRef(m, args);
  const qsets = m.bim.quantities(ref);
  let area: number | null = null;
  outer:
  for (const q of qsets) {
    for (const x of q.quantities) {
      if (/Area/i.test(x.name) && typeof x.value === 'number') {
        area = x.value;
        break outer;
      }
    }
  }
  return { text: area == null ? 'No Area quantity present.' : `Area = ${area.toFixed(3)} m².`, structured: { area } };
},
|
|
525
|
+
|
|
526
|
+
// ── Validation ──────────────────────────────────────────────────────────
|
|
527
|
+
async model_audit(m) {
  // Cheap data-quality audit: one issue per product missing a GlobalId,
  // plus a single extra issue when more than half the products are
  // unnamed. Score = max(0, 100 − 5·issues).
  const products = m.bim.query().toArray();
  let missingGid = 0;
  let missingName = 0;
  for (const e of products) {
    if (!e.globalId) missingGid += 1;
    if (!e.name) missingName += 1;
  }
  const issues = missingGid + (missingName > products.length / 2 ? 1 : 0);
  const score = Math.max(0, 100 - issues * 5);
  return {
    text: `Audit score: ${score}/100${issues > 0 ? ` (${issues} issue${issues === 1 ? '' : 's'}).` : '. Clean.'}`,
    structured: { overall: score, issues: { missingGlobalIds: missingGid, missingNamesRatio: missingName / Math.max(1, products.length) } },
  };
},
|
|
542
|
+
|
|
543
|
+
// ── bSDD (network — proxied through /api/bsdd to dodge CORS) ───────────
|
|
544
|
+
async bsdd_search(_m, args) {
  // Free-text search against the bSDD dictionary. Network call, routed
  // through the /api/bsdd proxy (see PROXIED_BSDD above for the CORS story).
  const query = String(args.query ?? '').trim();
  if (!query) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: '`query` is required.' });
  try {
    const results = await PROXIED_BSDD.search(query);
    // Both the text and structured views cap at 25 results for the budget.
    const head = `bSDD search '${query}' — ${results.length} result(s)${results.length > 25 ? ', showing first 25' : ''}:`;
    const lines = results.slice(0, 25).map((r) => `• ${r.code || r.name} — ${r.name ?? ''}\n  ${r.uri}`);
    return { text: [head, ...lines].join('\n'), structured: { query, count: results.length, results: results.slice(0, 25) } };
  } catch (err) {
    // Normalize network/HTTP failures into a ToolExecutionError.
    throw rethrowBsdd(err, 'search');
  }
},
|
|
556
|
+
async bsdd_class(_m, args) {
  // Look up the bSDD class for an IFC type and summarize its properties
  // grouped by property set. Network call via the /api/bsdd proxy.
  const ifcType = String(args.ifc_type ?? '');
  if (!ifcType) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: '`ifc_type` is required.' });
  try {
    const info = await PROXIED_BSDD.fetchClassInfo(ifcType);
    if (!info) throw new ToolExecutionError({ code: ToolErrorCode.ENTITY_NOT_FOUND, message: `bSDD has no class for '${ifcType}'.` });
    // Bucket properties by their Pset so the text view reads like a spec.
    const psetGroups = new Map<string, string[]>();
    for (const p of info.classProperties) {
      const k = p.propertySet ?? '(no Pset)';
      const list = psetGroups.get(k) ?? [];
      list.push(`${p.name}${p.dataType ? ` (${p.dataType})` : ''}`);
      psetGroups.set(k, list);
    }
    const head = `bSDD class ${info.code} — ${info.classProperties.length} properties across ${psetGroups.size} Psets:`;
    const lines: string[] = [head];
    // At most 10 properties listed per Pset; the remainder is summarized.
    for (const [pset, props] of psetGroups) {
      lines.push(`• ${pset} (${props.length}):`);
      for (const p of props.slice(0, 10)) lines.push(`  - ${p}`);
      if (props.length > 10) lines.push(`  - … +${props.length - 10} more`);
    }
    return { text: lines.join('\n'), structured: info as unknown as Record<string, unknown> };
  } catch (err) {
    // Normalize network/HTTP failures into a ToolExecutionError.
    throw rethrowBsdd(err, 'class lookup');
  }
},
|
|
581
|
+
// ── Mutation (queues edits on the in-memory store, persists via model_save) ─
|
|
582
|
+
async entity_set_property(m, args) {
|
|
583
|
+
const ref = resolveRef(m, args);
|
|
584
|
+
const pset = String(args.pset ?? '');
|
|
585
|
+
const name = String(args.name ?? '');
|
|
586
|
+
if (!pset || !name) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'pset and name are required.' });
|
|
587
|
+
m.bim.mutate.setProperty(ref, pset, name, args.value as string | number | boolean);
|
|
588
|
+
return { text: `Queued ${pset}.${name} = ${JSON.stringify(args.value)} on #${ref.expressId}.`, structured: { expressId: ref.expressId, pset, name, value: args.value } };
|
|
589
|
+
},
|
|
590
|
+
// Queue removal of a single Pset property on an entity.
async entity_delete_property(m, args) {
  const ref = resolveRef(m, args);
  const psetName = String(args.pset ?? '');
  const propName = String(args.name ?? '');
  if (!psetName || !propName) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'pset and name are required.' });
  m.bim.mutate.deleteProperty(ref, psetName, propName);
  return {
    text: `Queued delete ${psetName}.${propName} on #${ref.expressId}.`,
    structured: { expressId: ref.expressId, pset: psetName, name: propName },
  };
},
|
|
598
|
+
// Queue a direct STEP attribute write (e.g. Name) on an entity.
async entity_set_attribute(m, args) {
  const ref = resolveRef(m, args);
  const attrName = String(args.attribute ?? '');
  if (!attrName) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'attribute is required.' });
  m.bim.mutate.setAttribute(ref, attrName, String(args.value));
  return {
    text: `Queued ${attrName} = ${JSON.stringify(args.value)} on #${ref.expressId}.`,
    structured: { expressId: ref.expressId, attribute: attrName, value: args.value },
  };
},
|
|
606
|
+
// Create a new entity of the requested IFC type with optional raw attributes.
async entity_create(m, args) {
  const type = String(args.type ?? '');
  if (!type) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'type is required.' });
  // Use HeadlessLikeBackend's editor — it's the same path the stdio MCP
  // takes for entity_create.
  const backend = (m.bim as unknown as { backend: { ensureEditor(): { addEntity(t: string, a: unknown[]): { expressId: number } } } }).backend;
  const editor = backend.ensureEditor();
  const attrs = (args.attributes as unknown[] | undefined) ?? [];
  const created = editor.addEntity(type, attrs as Parameters<typeof editor.addEntity>[1]);
  return { text: `Created ${type} as #${created.expressId}.`, structured: { expressId: created.expressId, type } };
},
|
|
616
|
+
// Delete an entity from the in-memory store.
async entity_delete(m, args) {
  const ref = resolveRef(m, args);
  // The mutate namespace doesn't expose a delete on its public surface,
  // but the headless backend's mutation view does.
  const mutationView = (m.bim as unknown as { backend: { getMutationView(): { deleteEntity(id: number): boolean } | null } }).backend.getMutationView();
  if (!mutationView) throw new ToolExecutionError({ code: ToolErrorCode.INTERNAL_ERROR, message: 'Mutation view unavailable.' });
  const deleted = mutationView.deleteEntity(ref.expressId);
  return {
    text: deleted ? `Deleted #${ref.expressId}.` : `#${ref.expressId} was not in the store.`,
    structured: { expressId: ref.expressId, deleted },
  };
},
|
|
625
|
+
// Report the queue of pending (unsaved) mutations.
async mutation_diff(m) {
  const view = (m.bim as unknown as { backend: { getMutationView(): { mutationHistory?: unknown[] } | null } }).backend.getMutationView();
  const pending = view?.mutationHistory ?? [];
  return { text: `${pending.length} pending mutation(s).`, structured: { count: pending.length, mutations: pending } };
},
|
|
630
|
+
// Undo up to `n` queued mutations, stopping early when history runs dry.
async mutation_undo(m, args) {
  const requested = Math.max(1, Number(args.n ?? 1));
  let undone = 0;
  while (undone < requested && m.bim.mutate.undo(m.id)) undone++;
  return { text: `Undone ${undone} mutation(s).`, structured: { undone } };
},
|
|
639
|
+
async model_save(m, args) {
  // "Save" in the playground = stage a downloadable .ifc Blob. The user
  // explicitly clicks the download button later — never auto-triggered.
  // Filename is always .ifc — the agent sometimes invents .ids / .bcf
  // extensions based on prior context; we ignore them.
  const filename = coerceFilename(args.file_path as string | undefined, 'ifc', m.id);
  const schema = (args.schema as 'IFC2X3' | 'IFC4' | 'IFC4X3' | undefined) ?? (m.store.schemaVersion as 'IFC2X3' | 'IFC4' | 'IFC4X3');
  // Empty ref list = serialize the whole model, pending mutations included.
  const content = m.bim.export.ifc([], { schema });
  const text = typeof content === 'string' ? content : new TextDecoder().decode(content);
  const blob = new Blob([text], { type: 'application/x-step' });
  const file = playgroundFiles.add({
    filename, mimeType: 'application/x-step', size: blob.size, blob,
    source: 'model_save', description: `Saved model with pending mutations · ${schema}`,
  });
  return {
    // FIX: the message previously emitted the literal "$(unknown)" —
    // a broken interpolation — instead of the staged filename.
    text: `Saved ${filename} (${formatBytes(blob.size)}, ${schema}).`,
    structured: { fileId: file.id, filename, bytes: blob.size, schema },
    download: { fileId: file.id, filename, mimeType: 'application/x-step', size: blob.size, label: 'Save IFC' },
  };
},
|
|
659
|
+
|
|
660
|
+
// ── BCF (in-session project; bcf_export stages a .bcfzip download) ─────
|
|
661
|
+
// NOTE: BCF tool text outputs print the FULL guid every time. Truncating
|
|
662
|
+
// (`guid.slice(0,8)…`) breaks the agent loop — the agent only sees the
|
|
663
|
+
// text content, so a follow-up bcf_viewpoint_create / bcf_topic_update
|
|
664
|
+
// call has nothing to anchor on. Always include the complete guid.
|
|
665
|
+
// List BCF topics, optionally filtered by status. Full guids are printed
// so follow-up tool calls can anchor on them.
async bcf_topic_list(_m, args) {
  const project = getBcfProject();
  const statusFilter = typeof args.status === 'string' ? args.status : undefined;
  const topics: BCFTopic[] = Array.from(project.topics.values()).filter((t) => !statusFilter || t.topicStatus === statusFilter);
  const listing = topics.map((t) => `• ${t.guid} · ${t.topicStatus} · ${t.title}`).join('\n');
  return {
    text: `${topics.length} topic(s).${topics.length === 0 ? '' : '\n' + listing}`,
    structured: { count: topics.length, topics: topics.map((t) => ({ guid: t.guid, title: t.title, status: t.topicStatus, type: t.topicType, priority: t.priority, comments: t.comments.length })) },
  };
},
|
|
674
|
+
// Create a BCF topic from loosely-typed args and auto-stage the .bcfzip.
async bcf_topic_create(_m, args) {
  const title = String(args.title ?? '').trim();
  if (!title) throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'title is required.' });
  const project = getBcfProject();
  // Helper: accept a value only when it is actually a string.
  const str = (v: unknown): string | undefined => (typeof v === 'string' ? v : undefined);
  const topic = createBCFTopic({
    title,
    description: str(args.description),
    author: str(args.author) ?? 'ifc-lite-playground',
    topicType: str(args.type) ?? 'Issue',
    topicStatus: str(args.status) ?? 'Open',
    priority: str(args.priority),
    assignedTo: str(args.assigned_to),
    labels: Array.isArray(args.labels) ? (args.labels as string[]) : undefined,
  });
  addTopicToProject(project, topic);
  const download = await autoStageBcfDownload();
  return {
    text: `Created topic '${topic.title}' · guid=${topic.guid}`,
    structured: { guid: topic.guid, title: topic.title },
    ...(download ? { download } : {}),
  };
},
|
|
696
|
+
// Patch a topic: status, priority, and/or an appended comment.
async bcf_topic_update(_m, args) {
  const project = getBcfProject();
  const guid = String(args.guid ?? '');
  const topic = project.topics.get(guid);
  if (!topic) throw new ToolExecutionError({ code: ToolErrorCode.ENTITY_NOT_FOUND, message: `Topic ${guid} not found.` });
  const author = typeof args.modified_by === 'string' ? args.modified_by : 'ifc-lite-playground';
  // Every field is optional — only apply what the caller actually supplied.
  if (typeof args.status === 'string') updateTopicStatus(topic, args.status, author);
  if (typeof args.priority === 'string') topic.priority = args.priority;
  if (typeof args.comment === 'string') addCommentToTopic(topic, createBCFComment({ author, comment: args.comment }));
  const download = await autoStageBcfDownload();
  return {
    text: `Topic ${guid} updated.`,
    structured: { guid, status: topic.topicStatus },
    ...(download ? { download } : {}),
  };
},
|
|
714
|
+
// Shortcut for transitioning a topic to 'Closed'.
async bcf_topic_close(_m, args) {
  const project = getBcfProject();
  const guid = String(args.guid ?? '');
  const topic = project.topics.get(guid);
  if (!topic) throw new ToolExecutionError({ code: ToolErrorCode.ENTITY_NOT_FOUND, message: `Topic ${guid} not found.` });
  const author = typeof args.modified_by === 'string' ? args.modified_by : 'ifc-lite-playground';
  updateTopicStatus(topic, 'Closed', author);
  const download = await autoStageBcfDownload();
  return { text: `Closed ${guid}.`, structured: { guid }, ...(download ? { download } : {}) };
},
|
|
727
|
+
// Attach a viewpoint (component selection only, no camera) to a topic.
async bcf_viewpoint_create(_m, args) {
  const project = getBcfProject();
  const guid = String(args.guid ?? '');
  const topic = project.topics.get(guid);
  if (!topic) throw new ToolExecutionError({ code: ToolErrorCode.ENTITY_NOT_FOUND, message: `Topic ${guid} not found.` });
  const selection = (args.selection_global_ids as string[] | undefined) ?? [];
  const components = { selection: selection.map((g) => ({ ifcGuid: g, OriginatingSystem: 'ifc-lite-playground' })) };
  const viewpoint = { guid: cryptoRandomUuid(), components };
  addViewpointToTopic(topic, viewpoint as unknown as Parameters<typeof addViewpointToTopic>[1]);
  const download = await autoStageBcfDownload();
  return {
    text: `Viewpoint added (${selection.length} entity selection).`,
    structured: { viewpointGuid: viewpoint.guid, selection: selection.length },
    ...(download ? { download } : {}),
  };
},
|
|
745
|
+
// Bundle the in-session BCF project into a downloadable .bcfzip.
async bcf_export(_m, args) {
  const project = getBcfProject();
  const filename = coerceFilename(args.file_path as string | undefined, 'bcfzip', 'issues');
  const blob = await writeBCF(project);
  const file = playgroundFiles.add({
    filename, mimeType: 'application/zip', size: blob.size, blob,
    source: 'bcf_export', description: `${project.topics.size} topic(s)`,
  });
  return {
    // FIX: broken interpolation — message printed the literal "$(unknown)"
    // instead of the staged filename.
    text: `Bundled ${filename} (${formatBytes(blob.size)}, ${project.topics.size} topic(s)).`,
    structured: { fileId: file.id, filename, bytes: blob.size, topics: project.topics.size },
    download: { fileId: file.id, filename, mimeType: 'application/zip', size: blob.size, label: 'Get .bcfzip' },
  };
},
|
|
759
|
+
|
|
760
|
+
// ── IDS (parses + validates against the loaded model) ─────────────────
|
|
761
|
+
// Parse IDS XML, validate against the loaded model, and stage a JSON report.
async ids_validate(m, args) {
  const xml = resolveIdsXml(args);
  if (!xml) {
    throw new ToolExecutionError({
      code: ToolErrorCode.INVALID_INPUT,
      message: 'Provide IDS via `ids_path` (filename of an attached upload) or `ids_xml` (raw XML). Tell the user to drag a .ids file onto the chat input if they haven\'t attached one yet.',
    });
  }
  let doc: IDSDocument;
  try {
    doc = parseIDS(xml);
  } catch (err) {
    throw new ToolExecutionError({ code: ToolErrorCode.PARSE_FAILED, message: err instanceof Error ? err.message : String(err) });
  }
  const accessor = makeIdsAccessor(m);
  const report = await validateIDS(doc, accessor, {
    modelId: m.id,
    schemaVersion: m.store.schemaVersion,
    entityCount: m.store.entityCount,
  });
  const head = `IDS '${doc.info?.title ?? 'untitled'}' · ${report.summary.passedSpecifications}/${report.summary.totalSpecifications} specs passed (${report.summary.overallPassRate.toFixed(0)}%).`;
  const lines = report.specificationResults.map((s) => {
    // '·' = not applicable, '✓' = pass, '✗' = fail.
    const tag = s.status === 'not_applicable' ? '·' : s.status === 'pass' ? '✓' : '✗';
    return `${tag} ${s.specification.name ?? '(unnamed)'} — ${s.passedCount} pass / ${s.failedCount} fail · ${s.passRate.toFixed(0)}%`;
  });
  // Stage a downloadable JSON report so the user can save / share it.
  const reportBlob = new Blob([JSON.stringify(report, null, 2)], { type: 'application/json' });
  const reportSlug = (doc.info?.title ?? 'spec').replace(/[^a-zA-Z0-9]+/g, '-').toLowerCase();
  const reportFilename = coerceFilename(undefined, 'json', `ids-report-${reportSlug}`);
  const reportFile = playgroundFiles.add({
    filename: reportFilename, mimeType: 'application/json', size: reportBlob.size, blob: reportBlob,
    source: 'ids_validate',
    description: `${report.summary.passedSpecifications}/${report.summary.totalSpecifications} specs passed`,
  });
  return {
    text: [head, ...lines].join('\n'),
    structured: report as unknown as Record<string, unknown>,
    download: { fileId: reportFile.id, filename: reportFilename, mimeType: 'application/json', size: reportBlob.size, label: 'Get IDS report' },
  };
},
|
|
801
|
+
// Parse IDS XML and summarise its specifications without validating.
async ids_explain(_m, args) {
  const xml = resolveIdsXml(args);
  if (!xml) {
    throw new ToolExecutionError({
      code: ToolErrorCode.INVALID_INPUT,
      message: 'Provide IDS via `ids_path` (filename of an attached upload) or `ids_xml` (raw XML).',
    });
  }
  let doc: IDSDocument;
  try {
    doc = parseIDS(xml);
  } catch (err) {
    throw new ToolExecutionError({ code: ToolErrorCode.PARSE_FAILED, message: err instanceof Error ? err.message : String(err) });
  }
  const summary = [
    `IDS '${doc.info?.title ?? 'untitled'}' · ${doc.specifications.length} specification(s).`,
    ...doc.specifications.map((s, i) => `${i + 1}. ${s.name ?? '(unnamed)'} — applies to ${s.applicability.facets.length} facet(s); requires ${s.requirements.length} clause(s).`),
  ];
  return { text: summary.join('\n'), structured: doc as unknown as Record<string, unknown> };
},
|
|
817
|
+
|
|
818
|
+
// ── Export (CSV / JSON / IFC — staged for download) ───────────────────
|
|
819
|
+
// Serialize the model (optionally a GlobalId subset) to STEP and stage it.
async export_ifc(m, args) {
  const filename = coerceFilename(args.file_path as string | undefined, 'ifc', m.id);
  const schema = (args.schema as 'IFC2X3' | 'IFC4' | 'IFC4X3' | undefined) ?? (m.store.schemaVersion as 'IFC2X3' | 'IFC4' | 'IFC4X3');
  const refs: EntityRef[] = []; // FIX: was `let` but never reassigned
  if (Array.isArray(args.global_ids)) {
    const wanted = new Set(args.global_ids as string[]);
    for (const e of m.bim.query().toArray()) if (wanted.has(e.globalId)) refs.push(e.ref);
  }
  const content = m.bim.export.ifc(refs, { schema });
  const text = typeof content === 'string' ? content : new TextDecoder().decode(content);
  const blob = new Blob([text], { type: 'application/x-step' });
  // An empty refs list means "whole model", so report the full entity count.
  const count = refs.length || m.store.entityCount;
  const file = playgroundFiles.add({
    filename, mimeType: 'application/x-step', size: blob.size, blob,
    source: 'export_ifc', description: `${count} entit${count === 1 ? 'y' : 'ies'}`,
  });
  return {
    // FIX: broken interpolation — message printed the literal "$(unknown)".
    text: `Wrote ${filename} (${formatBytes(blob.size)}).`,
    structured: { fileId: file.id, filename, bytes: blob.size },
    download: { fileId: file.id, filename, mimeType: 'application/x-step', size: blob.size, label: 'Save IFC' },
  };
},
|
|
840
|
+
// Export selected columns to CSV (optionally filtered by IFC type).
async export_csv(m, args) {
  const cols = (args.columns as string[] | undefined) ?? ['GlobalId', 'Type', 'Name'];
  const sep = (args.separator as string | undefined) ?? ',';
  const filterType = args.type as string | undefined;
  const refs = (filterType ? m.bim.query().byType(filterType).toArray() : m.bim.query().toArray()).map((e) => e.ref);
  const csv = m.bim.export.csv(refs, { columns: cols, separator: sep });
  const filename = coerceFilename(args.file_path as string | undefined, 'csv', filterType ?? 'entities');
  const blob = new Blob([csv], { type: 'text/csv' });
  const file = playgroundFiles.add({
    filename, mimeType: 'text/csv', size: blob.size, blob,
    source: 'export_csv', description: `${refs.length} row(s) · ${cols.join(', ')}`,
  });
  return {
    // FIX: broken interpolation — message printed the literal "$(unknown)".
    text: `Wrote ${filename} (${refs.length} rows, ${formatBytes(blob.size)}).`,
    structured: { fileId: file.id, filename, rows: refs.length, bytes: blob.size },
    download: { fileId: file.id, filename, mimeType: 'text/csv', size: blob.size, label: 'Get .csv' },
  };
},
|
|
858
|
+
// Export selected columns as pretty-printed JSON rows.
async export_json(m, args) {
  const cols = (args.columns as string[] | undefined) ?? ['GlobalId', 'Type', 'Name'];
  const filterType = args.type as string | undefined;
  const refs = (filterType ? m.bim.query().byType(filterType).toArray() : m.bim.query().toArray()).map((e) => e.ref);
  const rows = m.bim.export.json(refs, cols);
  const filename = coerceFilename(args.file_path as string | undefined, 'json', filterType ?? 'entities');
  const text = JSON.stringify(rows, null, 2);
  const blob = new Blob([text], { type: 'application/json' });
  const file = playgroundFiles.add({
    filename, mimeType: 'application/json', size: blob.size, blob,
    source: 'export_json', description: `${rows.length} row(s) · ${cols.join(', ')}`,
  });
  return {
    // FIX: broken interpolation — message printed the literal "$(unknown)".
    text: `Wrote ${filename} (${rows.length} rows, ${formatBytes(blob.size)}).`,
    structured: { fileId: file.id, filename, rows: rows.length, bytes: blob.size },
    download: { fileId: file.id, filename, mimeType: 'application/json', size: blob.size, label: 'Get .json' },
  };
},
|
|
876
|
+
|
|
877
|
+
// ── Diff (needs two loaded models — uses ctx.registry) ────────────────
|
|
878
|
+
// Compare per-type entity counts between two loaded models.
async model_diff(m, args, ctx) {
  const { left, right } = resolveDiffModels(m, args, ctx);
  // Count entities per IFC type for one model.
  const countByType = (model: typeof left): Map<string, number> => {
    const counts = new Map<string, number>();
    for (const [type, ids] of model.store.entityIndex.byType) counts.set(type, ids.length);
    return counts;
  };
  const leftCounts = countByType(left);
  const rightCounts = countByType(right);
  const diffs: Array<{ type: string; left: number; right: number; delta: number }> = [];
  for (const t of new Set([...leftCounts.keys(), ...rightCounts.keys()])) {
    const a = leftCounts.get(t) ?? 0;
    const b = rightCounts.get(t) ?? 0;
    if (a !== b) diffs.push({ type: t, left: a, right: b, delta: b - a });
  }
  diffs.sort((x, y) => Math.abs(y.delta) - Math.abs(x.delta));
  const head = `Diff ${left.id} → ${right.id}: ${diffs.length} type-count change(s).`;
  const body = diffs.slice(0, 25).map((d) => ` • ${d.type}: ${d.left} → ${d.right} (${d.delta > 0 ? '+' : ''}${d.delta})`);
  return { text: [head, ...body].join('\n'), structured: { typeDiffs: diffs } };
},
|
|
894
|
+
// Compare a summed quantity (default IfcWall.Volume) between two models.
async quantity_diff(m, args, ctx) {
  const { left, right } = resolveDiffModels(m, args, ctx);
  const type = (args.type as string | undefined) ?? 'IfcWall';
  const qName = (args.quantity as string | undefined) ?? 'Volume';
  // Sum the named quantity over every entity of the given type.
  const sumFor = (model: LoadedPlaygroundModel): number =>
    model.bim.query().byType(type).toArray().reduce((acc, e) => {
      const v = model.bim.quantity(e.ref, '', qName);
      return typeof v === 'number' ? acc + v : acc;
    }, 0);
  const a = sumFor(left);
  const b = sumFor(right);
  return { text: `${type}.${qName}: ${a.toFixed(2)} → ${b.toFixed(2)} (${(b - a).toFixed(2)})`, structured: { type, quantity: qName, left: a, right: b, delta: b - a } };
},
|
|
910
|
+
|
|
911
|
+
// ── Viewer (drives the inline Three.js panel) ──────────────────────────
|
|
912
|
+
// Suggest the agent ask the user for permission before opening the viewer.
async viewer_ask(_m, args) {
  const reason = String(args.reason ?? '');
  const prompt = `Ask the user: "I'd like to open the inline 3D viewer${reason ? ` to ${reason}` : ''}. May I?" If they agree, call viewer_open.`;
  return { text: prompt, structured: { suggestedTool: 'viewer_open', reason } };
},
|
|
919
|
+
|
|
920
|
+
// Open the inline viewer panel; report readiness based on loaded geometry.
async viewer_open(_m, _args, ctx) {
  ctx.openViewerPanel?.();
  if (ctx.viewer?.isLoaded()) {
    const status = ctx.viewer.status();
    return {
      text: `Inline viewer ready (${status.meshCount} entities rendered). Pick interactions sync back via viewer_get_selection.`,
      structured: { open: true, meshCount: status.meshCount, inline: true },
    };
  }
  return {
    text: 'Asked to open the inline viewer. Geometry is processing — call viewer_status in a moment to confirm it’s ready.',
    structured: { open: true, pending: true },
  };
},
|
|
934
|
+
|
|
935
|
+
async viewer_close(_m, _args, ctx) {
  // The panel-collapse in this v1 isn't agent-controllable (the user owns
  // chrome). We surface a friendly status instead of pretending we
  // dismantled the canvas.
  void ctx;
  return {
    text: 'Inline viewer panel is user-controlled in the playground; toggle it from the chevron above the canvas.',
    structured: { closed: false, note: 'user-toggle' },
  };
},
|
|
942
|
+
|
|
943
|
+
// Report viewer state: attached? geometry loaded? how many picks?
async viewer_status(_m, _args, ctx) {
  const viewer = ctx.viewer;
  if (!viewer) return { text: 'No viewer attached.', structured: { open: false } };
  const status = viewer.status();
  const text = status.loaded
    ? `Viewer open · ${status.meshCount} meshes · ${status.selection.length} picked.`
    : 'Viewer panel mounted but no geometry yet.';
  return { text, structured: status };
},
|
|
952
|
+
|
|
953
|
+
// Paint matching entities with the requested colour.
async viewer_colorize(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const color = parseColorArg(args.color);
  const selector = {
    globalIds: args.global_ids as string[] | undefined,
    expressIds: args.express_ids as number[] | undefined,
    type: args.type as string | undefined,
  };
  const out = viewer.colorize({ ...selector, color });
  const noun = out.count === 1 ? 'y' : 'ies';
  return { text: `Painted ${out.count} entit${noun} ${formatColorTuple(color)}.`, structured: { count: out.count, color } };
},
|
|
964
|
+
|
|
965
|
+
// Show only the matching entities; hide everything else.
async viewer_isolate(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const selector = {
    globalIds: args.global_ids as string[] | undefined,
    expressIds: args.express_ids as number[] | undefined,
    type: args.type as string | undefined,
  };
  const out = viewer.isolate(selector);
  const noun = out.count === 1 ? 'y' : 'ies';
  return { text: `Isolated ${out.count} entit${noun}; everything else hidden.`, structured: { count: out.count } };
},
|
|
974
|
+
|
|
975
|
+
// Hide the matching entities.
async viewer_hide(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const selector = {
    globalIds: args.global_ids as string[] | undefined,
    expressIds: args.express_ids as number[] | undefined,
    type: args.type as string | undefined,
  };
  const out = viewer.hide(selector);
  const noun = out.count === 1 ? 'y' : 'ies';
  return { text: `Hid ${out.count} entit${noun}.`, structured: { count: out.count } };
},
|
|
984
|
+
|
|
985
|
+
// Re-show the matching entities.
async viewer_show(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const selector = {
    globalIds: args.global_ids as string[] | undefined,
    expressIds: args.express_ids as number[] | undefined,
    type: args.type as string | undefined,
  };
  const out = viewer.show(selector);
  const noun = out.count === 1 ? 'y' : 'ies';
  return { text: `Showed ${out.count} entit${noun}.`, structured: { count: out.count } };
},
|
|
994
|
+
|
|
995
|
+
// Restore visibility, colours, and the section plane to defaults.
async viewer_reset(_m, _args, ctx) {
  requireViewer(ctx).reset();
  return { text: 'Reset: visibility, colours, and section restored to defaults.', structured: { reset: true } };
},
|
|
1000
|
+
|
|
1001
|
+
// Frame the camera on the matching entities; error when nothing matches.
async viewer_fly_to(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const out = viewer.flyTo({
    globalIds: args.global_ids as string[] | undefined,
    expressIds: args.express_ids as number[] | undefined,
  });
  if (out.count === 0) {
    throw new ToolExecutionError({ code: ToolErrorCode.ENTITY_NOT_FOUND, message: 'No matching entities to frame.' });
  }
  const noun = out.count === 1 ? 'y' : 'ies';
  return { text: `Flying camera to ${out.count} entit${noun}.`, structured: { count: out.count } };
},
|
|
1012
|
+
|
|
1013
|
+
// Place an axis-aligned section plane at the given position.
async viewer_set_section(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  const axis = String(args.axis ?? '').toLowerCase();
  const validAxis = axis === 'x' || axis === 'y' || axis === 'z';
  if (!validAxis) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'axis must be "x", "y", or "z".' });
  }
  const position = Number(args.position ?? 0);
  if (!Number.isFinite(position)) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'position must be a number.' });
  }
  viewer.setSection({ axis: axis as 'x' | 'y' | 'z', position });
  return { text: `Section ${axis} = ${position.toFixed(2)}.`, structured: { axis, position } };
},
|
|
1026
|
+
|
|
1027
|
+
// Remove any active section plane.
async viewer_clear_section(_m, _args, ctx) {
  requireViewer(ctx).clearSection();
  return { text: 'Section cleared.', structured: { cleared: true } };
},
|
|
1032
|
+
|
|
1033
|
+
// Assign one colour per building storey.
async viewer_color_by_storey(_m, _args, ctx) {
  const out = requireViewer(ctx).colorByStorey();
  const plural = out.groups === 1 ? '' : 's';
  return { text: `Coloured by storey — ${out.groups} group${plural}.`, structured: out };
},
|
|
1038
|
+
|
|
1039
|
+
// Colour entities of one IFC type by the value of a Pset property.
async viewer_color_by_property(m, args, ctx) {
  const viewer = requireViewer(ctx);
  const type = String(args.type ?? '');
  const psetName = String(args.pset ?? '');
  const propName = String(args.property ?? '');
  if (!type || !psetName || !propName) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'type, pset, and property are required.' });
  }
  // The viewer does the colouring; we supply a property sampler bound to
  // this model so it can read values per express id.
  const sample = (expressId: number) => {
    const ref: EntityRef = { modelId: m.id, expressId };
    return m.bim.property(ref, psetName, propName);
  };
  const out = viewer.colorByProperty({ type, pset: psetName, property: propName, sample });
  const legendLines = out.legend.map((l) => ` • ${l.value} — ${l.count}`);
  return { text: `Coloured ${type} by ${psetName}.${propName} — ${out.legend.length} bucket(s):\n${legendLines.join('\n')}`, structured: out };
},
|
|
1059
|
+
|
|
1060
|
+
// Return the current viewer pick set, enriched with the requested facets.
async viewer_get_selection(m, args, ctx) {
  const viewer = requireViewer(ctx);
  const sel = viewer.getSelection();
  if (sel.length === 0) return { text: 'No selection in viewer.', structured: { selection: [] } };
  const include = new Set((args.include as string[] | undefined) ?? ['attributes']);
  const enriched = sel.map((s) => {
    const ref: EntityRef = { modelId: m.id, expressId: s.expressId };
    const data = m.bim.entity(ref);
    const row: Record<string, unknown> = { ...s, entity: data };
    // Only fetch facets the caller asked for — and only when the entity exists.
    if (data) {
      if (include.has('attributes')) row.attributes = m.bim.attributes(ref);
      if (include.has('properties')) row.properties = m.bim.properties(ref);
      if (include.has('quantities')) row.quantities = m.bim.quantities(ref);
      if (include.has('classifications')) row.classifications = m.bim.classifications(ref);
      if (include.has('materials')) row.materials = m.bim.materials(ref);
    }
    return row;
  });
  const head = `${sel.length} entit${sel.length === 1 ? 'y' : 'ies'} selected:`;
  const lines = enriched.map((e) => {
    const data = e.entity as { type?: string; name?: string; globalId?: string } | null;
    return `• ${data?.type ?? '?'} #${(e as { expressId: number }).expressId}${data?.name ? ` '${data.name}'` : ''}${data?.globalId ? ` GlobalId=${data.globalId}` : ''}`;
  });
  return { text: [head, ...lines].join('\n'), structured: { selection: enriched } };
},
|
|
1083
|
+
|
|
1084
|
+
// Full-detail dump of the current viewer selection (all facets, always).
async viewer_describe_selection(m, _args, ctx) {
  const viewer = requireViewer(ctx);
  const sel = viewer.getSelection();
  if (sel.length === 0) return { text: 'Nothing selected — click an entity in the viewer first.', structured: { selection: [] } };
  const enriched = sel.map((s) => {
    const ref: EntityRef = { modelId: m.id, expressId: s.expressId };
    return {
      ...s,
      entity: m.bim.entity(ref),
      attributes: m.bim.attributes(ref),
      properties: m.bim.properties(ref),
      quantities: m.bim.quantities(ref),
      classifications: m.bim.classifications(ref),
      materials: m.bim.materials(ref),
    };
  });
  const lines: string[] = [`${enriched.length} selected (full detail):`];
  for (const e of enriched) {
    const data = e.entity as { type?: string; name?: string; globalId?: string } | null;
    lines.push(`• ${data?.type ?? '?'} #${e.expressId} '${data?.name ?? '(unnamed)'}'`);
    if (data?.globalId) lines.push(`  GlobalId: ${data.globalId}`);
    if (e.properties && e.properties.length > 0) {
      const psets = e.properties.map((p) => `${p.name} (${p.properties.length})`);
      lines.push(`  Property sets: ${psets.join(', ')}`);
    }
    if (e.materials) {
      const mat = e.materials as { name?: string; layers?: Array<{ materialName?: string; name?: string }> };
      if (mat.layers?.length) lines.push(`  Materials: ${mat.layers.map((l) => l.materialName ?? l.name).join(', ')}`);
      else if (mat.name) lines.push(`  Material: ${mat.name}`);
    }
  }
  return { text: lines.join('\n'), structured: { selection: enriched } };
},
|
|
1118
|
+
|
|
1119
|
+
// List the bSDD property sets published for a given IFC type.
async bsdd_property_sets(_m, args) {
  const ifcType = String(args.ifc_type ?? '');
  if (!ifcType) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: '`ifc_type` is required.' });
  }
  try {
    const psets = await PROXIED_BSDD.getPropertySets(ifcType);
    // Flatten the Map<name, props> into plain objects for the wire payload.
    const groups = [...psets.entries()].map(([name, properties]) => ({ name, properties }));
    const lines = [
      `bSDD property sets for ${ifcType} — ${groups.length} Pset(s):`,
      ...groups.map((g) => `• ${g.name} (${g.properties.length} properties)`),
    ];
    return { text: lines.join('\n'), structured: { ifcType, propertySets: groups } };
  } catch (err) {
    throw rethrowBsdd(err, 'property-set lookup');
  }
},
|
|
1132
|
+
|
|
1133
|
+
// Suggest related bSDD dictionary classes for an entity, keyed off its IFC
// type. Mirrors the stdio MCP flow: resolve the entity, read its type, then
// ask bSDD for related dictionary classes.
async bsdd_match(m, args) {
  let expressId: number | null = null;
  if (typeof args.express_id === 'number') {
    expressId = args.express_id;
  } else if (typeof args.global_id === 'string') {
    expressId = resolveRef(m, args).expressId;
  }
  if (expressId == null) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: 'Provide express_id or global_id.' });
  }
  const ifcType = m.store.entities.getTypeName(expressId) ?? 'Unknown';
  try {
    const candidates = await PROXIED_BSDD.searchRelatedClasses(ifcType);
    const lines = [
      `bSDD candidates for ${ifcType} (#${expressId}) — ${candidates.length} match(es):`,
      ...candidates.slice(0, 10).map((c) => `• ${c.code} — ${c.name} (${c.dictionaryUri})`),
    ];
    // Cap the listing at 10 but surface how many more exist.
    if (candidates.length > 10) lines.push(` … +${candidates.length - 10} more`);
    return { text: lines.join('\n'), structured: { ifcType, expressId, candidates } };
  } catch (err) {
    throw rethrowBsdd(err, 'related-class search');
  }
},
|
|
1157
|
+
|
|
1158
|
+
// ── Discovery (extras) ─────────────────────────────────────────────────
|
|
1159
|
+
// Single-model playground: acknowledge the call but never actually drop the
// model — the parent (sample picker / drop zone) owns the load lifecycle.
// We report that contract honestly instead of silently no-oping.
async model_unload(m, args) {
  const target = String(args.model_id ?? m.id);
  if (target !== m.id) {
    return {
      text: `Model '${target}' isn't loaded in this session.`,
      structured: { modelId: target, unloaded: false, reason: 'not-loaded' },
    };
  }
  return {
    text: `model_unload is a no-op in the web playground — close the tab or pick another sample to drop the model. (Targeted '${target}'.)`,
    structured: { modelId: target, unloaded: false, reason: 'browser-singleton' },
  };
},
|
|
1176
|
+
|
|
1177
|
+
// Federated load isn't wired in v1 — only the active sample is loaded.
// Raise UNSUPPORTED_OPERATION so the agent routes the user to the sample
// picker / dropzone (or to the stdio MCP, which does support it).
async model_load(_m, args) {
  const path = String(args.file_path ?? '');
  throw new ToolExecutionError({
    code: ToolErrorCode.UNSUPPORTED_OPERATION,
    message: `model_load isn't supported in the web playground (single-model session). To load '${path || 'another file'}', the user picks it from the sample list or drops it on the dropzone. The stdio MCP supports federated load.`,
  });
},
|
|
1187
|
+
|
|
1188
|
+
// ── Mutation (composer) ────────────────────────────────────────────────
|
|
1189
|
+
// Run N tool calls in order, stopping at the first failure. Each op is
// dispatched back through IMPLS so semantics match individual calls exactly
// — there is no separate codepath to drift from. Per-step results are
// reported so the agent can decide whether to undo.
async mutation_batch(m, args, ctx) {
  const ops = args.operations as Array<{ tool: string; args?: Record<string, unknown> }> | undefined;
  if (!Array.isArray(ops) || ops.length === 0) {
    throw new ToolExecutionError({ code: ToolErrorCode.INVALID_INPUT, message: '`operations: [{tool, args}, …]` is required.' });
  }
  const results: Array<{ tool: string; ok: boolean; text: string; errorCode?: string }> = [];
  for (const op of ops) {
    const impl = IMPLS[op.tool];
    if (!impl) {
      results.push({ tool: op.tool, ok: false, text: `Unknown tool: ${op.tool}`, errorCode: ToolErrorCode.INVALID_INPUT });
      break; // unknown tool aborts the rest of the batch
    }
    try {
      const out = await impl(m, op.args ?? {}, ctx);
      results.push({ tool: op.tool, ok: true, text: out.text });
    } catch (err) {
      const errorCode = err instanceof ToolExecutionError ? err.code : ToolErrorCode.INTERNAL_ERROR;
      results.push({ tool: op.tool, ok: false, text: err instanceof Error ? err.message : String(err), errorCode });
      break; // first failure stops the batch
    }
  }
  const passed = results.filter((r) => r.ok).length;
  const head =
    passed === ops.length
      ? `Batch ok · ${passed}/${ops.length} ops applied.`
      : `Batch stopped · ${passed}/${ops.length} ops applied; the rest were skipped.`;
  const lines = [head, ...results.map((r, i) => ` ${i + 1}. ${r.ok ? 'ok' : 'fail'} — ${r.tool}: ${r.text}`)];
  return { text: lines.join('\n'), structured: { results, passed, total: ops.length } };
},
|
|
1221
|
+
|
|
1222
|
+
// ── Viewer (extras) ────────────────────────────────────────────────────
|
|
1223
|
+
// Block until the user clicks something in the viewer, or until the timeout
// elapses. Registers a one-shot listener through the multi-subscriber API so
// the panel's own selection handler keeps receiving live updates after this
// call completes.
async viewer_wait_for_selection(_m, args, ctx) {
  const viewer = requireViewer(ctx);
  // Clamp to [500ms, 5min] regardless of what the agent asked for.
  const timeoutMs = Math.max(500, Math.min(Number(args.timeout_ms ?? 60_000), 5 * 60_000));
  const startedAt = Date.now();
  const alreadySelected = viewer.getSelection();
  if (alreadySelected.length > 0) {
    // Something is selected right now — answer immediately instead of
    // making the agent stall for no reason.
    return {
      text: `Already selected ${alreadySelected.length} entit${alreadySelected.length === 1 ? 'y' : 'ies'}.`,
      structured: { selection: alreadySelected, waitedMs: 0, timedOut: false },
    };
  }
  const hits: import('./PlaygroundViewer').SelectionHit[] = await new Promise((resolve) => {
    let unsubscribe: (() => void) | null = null;
    const timer = window.setTimeout(() => {
      unsubscribe?.();
      resolve([]); // timeout → empty selection
    }, timeoutMs);
    unsubscribe = viewer.subscribeSelection((sel) => {
      if (sel.length === 0) return; // deselect events don't count
      window.clearTimeout(timer);
      unsubscribe?.();
      resolve(sel);
    });
  });
  const waitedMs = Date.now() - startedAt;
  if (hits.length === 0) {
    return {
      text: `Timed out after ${Math.round(waitedMs / 1000)}s with no selection.`,
      structured: { selection: [], waitedMs, timedOut: true },
    };
  }
  return {
    text: `User selected ${hits.length} entit${hits.length === 1 ? 'y' : 'ies'} (waited ${Math.round(waitedMs / 1000)}s).`,
    structured: { selection: hits, waitedMs, timedOut: false },
  };
},
|
|
1268
|
+
};
|
|
1269
|
+
|
|
1270
|
+
// ── helpers ────────────────────────────────────────────────────────────────
|
|
1271
|
+
|
|
1272
|
+
function resolveRef(m: LoadedPlaygroundModel, args: Record<string, unknown>): EntityRef {
|
|
1273
|
+
if (typeof args.express_id === 'number') {
|
|
1274
|
+
return { modelId: m.id, expressId: args.express_id };
|
|
1275
|
+
}
|
|
1276
|
+
if (typeof args.global_id === 'string') {
|
|
1277
|
+
// Linear scan — fine for v1 since we only have one model in memory.
|
|
1278
|
+
for (const [, ids] of m.store.entityIndex.byType) {
|
|
1279
|
+
for (const id of ids) {
|
|
1280
|
+
const node = new EntityNode(m.store, id);
|
|
1281
|
+
if (node.globalId === args.global_id) return { modelId: m.id, expressId: id };
|
|
1282
|
+
}
|
|
1283
|
+
}
|
|
1284
|
+
throw new ToolExecutionError({
|
|
1285
|
+
code: ToolErrorCode.ENTITY_NOT_FOUND,
|
|
1286
|
+
message: `No entity with GlobalId '${args.global_id}' in this model.`,
|
|
1287
|
+
});
|
|
1288
|
+
}
|
|
1289
|
+
throw new ToolExecutionError({
|
|
1290
|
+
code: ToolErrorCode.INVALID_INPUT,
|
|
1291
|
+
message: 'Provide either global_id or express_id.',
|
|
1292
|
+
});
|
|
1293
|
+
}
|
|
1294
|
+
|
|
1295
|
+
function refStr(ref: EntityRef): string {
|
|
1296
|
+
return `#${ref.expressId} (model=${ref.modelId})`;
|
|
1297
|
+
}
|
|
1298
|
+
|
|
1299
|
+
function formatBytes(bytes: number): string {
|
|
1300
|
+
if (bytes >= 1024 * 1024) return (bytes / (1024 * 1024)).toFixed(1) + ' MB';
|
|
1301
|
+
if (bytes >= 1024) return (bytes / 1024).toFixed(0) + ' KB';
|
|
1302
|
+
return bytes + ' B';
|
|
1303
|
+
}
|
|
1304
|
+
|
|
1305
|
+
/**
|
|
1306
|
+
* Pull IDS XML from whichever knob the agent reached for. The MCP wire
|
|
1307
|
+
* surface uses `ids_path` (a filename); we look it up in the user's
|
|
1308
|
+
* attached uploads via `playgroundUploads`. The agent may also pass
|
|
1309
|
+
* `ids_xml` as raw XML (for v0.1 compat with the Node MCP that reads
|
|
1310
|
+
* disk). Returns trimmed XML or null when neither knob worked.
|
|
1311
|
+
*/
|
|
1312
|
+
function resolveIdsXml(args: Record<string, unknown>): string | null {
|
|
1313
|
+
// Path-based — preferred when the user attached a .ids file. Tolerate
|
|
1314
|
+
// a few variant arg names the agent invents.
|
|
1315
|
+
const path = String(args.ids_path ?? args.path ?? args.file_path ?? '').trim();
|
|
1316
|
+
if (path) {
|
|
1317
|
+
const upload = playgroundUploads.resolve(path);
|
|
1318
|
+
if (upload) return upload.text.trim();
|
|
1319
|
+
// The agent might have referenced an old / non-existent file. Surface
|
|
1320
|
+
// that distinct from "no IDS at all" so it can ask the user to drop
|
|
1321
|
+
// the file rather than re-paste raw XML.
|
|
1322
|
+
throw new ToolExecutionError({
|
|
1323
|
+
code: ToolErrorCode.ENTITY_NOT_FOUND,
|
|
1324
|
+
message: `No attached file matches '${path}'. Tell the user to drag the .ids onto the chat input, then retry.`,
|
|
1325
|
+
});
|
|
1326
|
+
}
|
|
1327
|
+
// Direct XML — works without an upload.
|
|
1328
|
+
const xml = String(args.ids_xml ?? args.ids ?? '').trim();
|
|
1329
|
+
if (xml) return xml;
|
|
1330
|
+
return null;
|
|
1331
|
+
}
|
|
1332
|
+
|
|
1333
|
+
/**
|
|
1334
|
+
* Strip whatever extension the agent supplied (or any odd path components)
|
|
1335
|
+
* and force the canonical one for the artifact this tool actually produces.
|
|
1336
|
+
*
|
|
1337
|
+
* The agent loves to invent filenames like `wall_fire_rating.ids` when the
|
|
1338
|
+
* user asks "save the wall fire ratings" — but `model_save` writes IFC,
|
|
1339
|
+
* `bcf_export` writes BCFZIP, `ids_validate` writes a JSON report. Trusting
|
|
1340
|
+
* the agent's extension means the user clicks Save IFC and gets a `.ids`
|
|
1341
|
+
* file the OS won't recognise. Always enforce.
|
|
1342
|
+
*
|
|
1343
|
+
* coerceFilename('wall_fire_rating.ids', 'ifc') → 'wall_fire_rating.ifc'
|
|
1344
|
+
* coerceFilename('/tmp/foo.bar/baz.csv', 'json') → 'baz.json'
|
|
1345
|
+
* coerceFilename(undefined, 'bcfzip', 'issues') → 'issues.bcfzip'
|
|
1346
|
+
*/
|
|
1347
|
+
function coerceFilename(
|
|
1348
|
+
raw: string | undefined,
|
|
1349
|
+
ext: 'ifc' | 'bcfzip' | 'csv' | 'json',
|
|
1350
|
+
fallbackBase: string,
|
|
1351
|
+
): string {
|
|
1352
|
+
// Lift the basename out of any path the agent supplied.
|
|
1353
|
+
let base = (typeof raw === 'string' ? raw.split(/[\\/]/).pop() ?? '' : '').trim();
|
|
1354
|
+
if (!base) base = fallbackBase;
|
|
1355
|
+
// Drop any extension already on it (incl. multi-dot like .bcf.zip).
|
|
1356
|
+
base = base.replace(/\.(ifc|ifczip|bcfzip|bcf|zip|csv|json|tsv|xml|ids|gltf|glb|ifcx|pdf)$/i, '');
|
|
1357
|
+
base = base.replace(/[^\w.\-]+/g, '_'); // sanitize for OS download
|
|
1358
|
+
if (!base) base = fallbackBase;
|
|
1359
|
+
return `${base}.${ext}`;
|
|
1360
|
+
}
|
|
1361
|
+
|
|
1362
|
+
/** Resolve (left, right) diff models from the dispatch context. The agent
|
|
1363
|
+
* passes `a` / `b` model_ids; we look them up in the registry, falling
|
|
1364
|
+
* back to the primary model for one side if the agent only provided the
|
|
1365
|
+
* other id (rare, but lets the chat work with a single loaded model). */
|
|
1366
|
+
function resolveDiffModels(
|
|
1367
|
+
primary: LoadedPlaygroundModel,
|
|
1368
|
+
args: Record<string, unknown>,
|
|
1369
|
+
ctx: DispatchContext,
|
|
1370
|
+
): { left: LoadedPlaygroundModel; right: LoadedPlaygroundModel } {
|
|
1371
|
+
const aId = String(args.a ?? '');
|
|
1372
|
+
const bId = String(args.b ?? '');
|
|
1373
|
+
if (!aId || !bId) {
|
|
1374
|
+
throw new ToolExecutionError({
|
|
1375
|
+
code: ToolErrorCode.INVALID_INPUT,
|
|
1376
|
+
message: 'Both `a` and `b` model_ids are required. Load a second model first.',
|
|
1377
|
+
});
|
|
1378
|
+
}
|
|
1379
|
+
const left = aId === primary.id ? primary : ctx.registry?.get(aId);
|
|
1380
|
+
const right = bId === primary.id ? primary : ctx.registry?.get(bId);
|
|
1381
|
+
if (!left || !right) {
|
|
1382
|
+
throw new ToolExecutionError({
|
|
1383
|
+
code: ToolErrorCode.MODEL_NOT_FOUND,
|
|
1384
|
+
message: `Both models must be loaded; missing: ${[!left && aId, !right && bId].filter(Boolean).join(', ')}`,
|
|
1385
|
+
});
|
|
1386
|
+
}
|
|
1387
|
+
return { left, right };
|
|
1388
|
+
}
|
|
1389
|
+
|
|
1390
|
+
/** Surface IDS-accessor lookup failures at debug level instead of dropping
|
|
1391
|
+
* them silently. A regression in EntityNode would otherwise turn into
|
|
1392
|
+
* changed IDS results without any signal in devtools — debug-level logging
|
|
1393
|
+
* gives an opt-in trail without polluting normal browser sessions. */
|
|
1394
|
+
function logIdsAccessorMiss(fn: string, id: number, err: unknown): void {
|
|
1395
|
+
// eslint-disable-next-line no-console
|
|
1396
|
+
console.debug(`[playground-dispatcher] IDS accessor ${fn} miss`, { expressId: id, err });
|
|
1397
|
+
}
|
|
1398
|
+
|
|
1399
|
+
/** Build the IDS validator's data accessor from a loaded model. Implements
|
|
1400
|
+
* the full IFCDataAccessor surface @ifc-lite/ids expects (see
|
|
1401
|
+
* packages/ids/src/types.ts:384). Each method bridges to the SDK's bim
|
|
1402
|
+
* namespaces or directly to EntityNode. */
|
|
1403
|
+
function makeIdsAccessor(m: LoadedPlaygroundModel): import('@ifc-lite/ids').IFCDataAccessor {
|
|
1404
|
+
const ref = (id: number): EntityRef => ({ modelId: m.id, expressId: id });
|
|
1405
|
+
return {
|
|
1406
|
+
getEntityType(id) {
|
|
1407
|
+
try { return new EntityNode(m.store, id).type; } catch (err) { logIdsAccessorMiss('getEntityType', id, err); return undefined; }
|
|
1408
|
+
},
|
|
1409
|
+
getEntityName(id) {
|
|
1410
|
+
try { return new EntityNode(m.store, id).name || undefined; } catch (err) { logIdsAccessorMiss('getEntityName', id, err); return undefined; }
|
|
1411
|
+
},
|
|
1412
|
+
getGlobalId(id) {
|
|
1413
|
+
try { return new EntityNode(m.store, id).globalId || undefined; } catch (err) { logIdsAccessorMiss('getGlobalId', id, err); return undefined; }
|
|
1414
|
+
},
|
|
1415
|
+
getDescription(id) {
|
|
1416
|
+
try { return new EntityNode(m.store, id).description || undefined; } catch (err) { logIdsAccessorMiss('getDescription', id, err); return undefined; }
|
|
1417
|
+
},
|
|
1418
|
+
getObjectType(id) {
|
|
1419
|
+
try { return new EntityNode(m.store, id).objectType || undefined; } catch (err) { logIdsAccessorMiss('getObjectType', id, err); return undefined; }
|
|
1420
|
+
},
|
|
1421
|
+
getEntitiesByType(typeName) {
|
|
1422
|
+
const wantedUpper = typeName.toUpperCase();
|
|
1423
|
+
const out: number[] = [];
|
|
1424
|
+
for (const [t, ids] of m.store.entityIndex.byType) {
|
|
1425
|
+
if (t.toUpperCase() === wantedUpper) for (const id of ids) out.push(id);
|
|
1426
|
+
}
|
|
1427
|
+
return out;
|
|
1428
|
+
},
|
|
1429
|
+
getAllEntityIds() {
|
|
1430
|
+
const out: number[] = [];
|
|
1431
|
+
for (const id of m.store.entityIndex.byId.keys()) out.push(id);
|
|
1432
|
+
return out;
|
|
1433
|
+
},
|
|
1434
|
+
getPropertyValue(id, psetName, propName) {
|
|
1435
|
+
const v = m.bim.property(ref(id), psetName, propName);
|
|
1436
|
+
if (v == null) return undefined;
|
|
1437
|
+
return { value: v, dataType: typeof v === 'number' ? 'IFCREAL' : typeof v === 'boolean' ? 'IFCBOOLEAN' : 'IFCLABEL', propertySetName: psetName, propertyName: propName };
|
|
1438
|
+
},
|
|
1439
|
+
getPropertySets(id) {
|
|
1440
|
+
return m.bim.properties(ref(id)).map((pset) => ({
|
|
1441
|
+
name: pset.name,
|
|
1442
|
+
properties: pset.properties.map((p) => ({
|
|
1443
|
+
name: p.name,
|
|
1444
|
+
value: p.value as string | number | boolean | null,
|
|
1445
|
+
dataType: typeof p.value === 'number' ? 'IFCREAL' : typeof p.value === 'boolean' ? 'IFCBOOLEAN' : 'IFCLABEL',
|
|
1446
|
+
})),
|
|
1447
|
+
}));
|
|
1448
|
+
},
|
|
1449
|
+
getClassifications(id) {
|
|
1450
|
+
return m.bim.classifications(ref(id)).map((c) => ({
|
|
1451
|
+
system: c.system ?? '',
|
|
1452
|
+
value: c.identification ?? c.name ?? '',
|
|
1453
|
+
name: c.name,
|
|
1454
|
+
}));
|
|
1455
|
+
},
|
|
1456
|
+
getMaterials(id) {
|
|
1457
|
+
const mat = m.bim.materials(ref(id));
|
|
1458
|
+
if (!mat) return [];
|
|
1459
|
+
const layers = (mat as { layers?: Array<{ materialName?: string; name?: string }>; name?: string });
|
|
1460
|
+
if (Array.isArray(layers.layers) && layers.layers.length > 0) {
|
|
1461
|
+
return layers.layers.map((l) => ({ name: l.materialName ?? l.name ?? '' }));
|
|
1462
|
+
}
|
|
1463
|
+
if (layers.name) return [{ name: layers.name }];
|
|
1464
|
+
return [];
|
|
1465
|
+
},
|
|
1466
|
+
getParent(id) {
|
|
1467
|
+
try {
|
|
1468
|
+
const parent = new EntityNode(m.store, id).containedIn() ?? new EntityNode(m.store, id).decomposedBy();
|
|
1469
|
+
if (!parent) return undefined;
|
|
1470
|
+
return { expressId: parent.expressId, entityType: parent.type ?? '' };
|
|
1471
|
+
} catch (err) { logIdsAccessorMiss('getParent', id, err); return undefined; }
|
|
1472
|
+
},
|
|
1473
|
+
getAttribute(id, attributeName) {
|
|
1474
|
+
const attrs = m.bim.attributes(ref(id));
|
|
1475
|
+
const found = attrs.find((a) => a.name === attributeName);
|
|
1476
|
+
return found ? String(found.value) : undefined;
|
|
1477
|
+
},
|
|
1478
|
+
};
|
|
1479
|
+
}
|
|
1480
|
+
|
|
1481
|
+
/** Tiny RFC4122-ish v4 UUID. Browsers ship crypto.randomUUID but TypeScript
|
|
1482
|
+
* lib.dom doesn't always type it; fall back to a Math.random implementation
|
|
1483
|
+
* for ancient browsers. */
|
|
1484
|
+
function cryptoRandomUuid(): string {
|
|
1485
|
+
const c = (globalThis as { crypto?: { randomUUID?: () => string } }).crypto;
|
|
1486
|
+
if (c?.randomUUID) return c.randomUUID();
|
|
1487
|
+
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (ch) => {
|
|
1488
|
+
const r = (Math.random() * 16) | 0;
|
|
1489
|
+
return (ch === 'x' ? r : (r & 0x3) | 0x8).toString(16);
|
|
1490
|
+
});
|
|
1491
|
+
}
|
|
1492
|
+
|
|
1493
|
+
interface BsddHttpErrorLike {
|
|
1494
|
+
name: string;
|
|
1495
|
+
status: number;
|
|
1496
|
+
retryAfterSeconds?: number;
|
|
1497
|
+
url: string;
|
|
1498
|
+
statusText: string;
|
|
1499
|
+
}
|
|
1500
|
+
function rethrowBsdd(err: unknown, label: string): ToolExecutionError {
|
|
1501
|
+
if (err && typeof err === 'object' && (err as { name?: string }).name === 'BsddHttpError') {
|
|
1502
|
+
const e = err as BsddHttpErrorLike;
|
|
1503
|
+
if (e.status === 429) {
|
|
1504
|
+
return new ToolExecutionError({
|
|
1505
|
+
code: ToolErrorCode.RATE_LIMITED,
|
|
1506
|
+
message: `bSDD rate-limited the ${label} request (HTTP 429).`,
|
|
1507
|
+
details: { url: e.url, status: e.status, retryAfterSeconds: e.retryAfterSeconds },
|
|
1508
|
+
hint: e.retryAfterSeconds != null ? `Retry after ${e.retryAfterSeconds}s.` : 'Avoid back-to-back bSDD calls.',
|
|
1509
|
+
});
|
|
1510
|
+
}
|
|
1511
|
+
return new ToolExecutionError({
|
|
1512
|
+
code: ToolErrorCode.EXTERNAL_SERVICE_FAILED,
|
|
1513
|
+
message: `bSDD ${label} failed: HTTP ${e.status} ${e.statusText}.`,
|
|
1514
|
+
details: { url: e.url, status: e.status },
|
|
1515
|
+
});
|
|
1516
|
+
}
|
|
1517
|
+
if (err instanceof ToolExecutionError) return err;
|
|
1518
|
+
return new ToolExecutionError({
|
|
1519
|
+
code: ToolErrorCode.INTERNAL_ERROR,
|
|
1520
|
+
message: err instanceof Error ? err.message : String(err),
|
|
1521
|
+
});
|
|
1522
|
+
}
|
|
1523
|
+
|
|
1524
|
+
// ── public API ─────────────────────────────────────────────────────────────
|
|
1525
|
+
|
|
1526
|
+
/** All tool names the playground knows how to execute (for the chat tools[] list). */
|
|
1527
|
+
export function supportedToolNames(): string[] {
|
|
1528
|
+
return Object.keys(IMPLS);
|
|
1529
|
+
}
|
|
1530
|
+
|
|
1531
|
+
/** Anthropic-compatible JSON schema for a single tool's input. */
|
|
1532
|
+
export interface AnthropicInputSchema {
|
|
1533
|
+
type: 'object';
|
|
1534
|
+
properties: Record<string, { type: string; description?: string }>;
|
|
1535
|
+
required?: string[];
|
|
1536
|
+
}
|
|
1537
|
+
export interface AnthropicToolDef {
|
|
1538
|
+
name: string;
|
|
1539
|
+
description: string;
|
|
1540
|
+
input_schema: AnthropicInputSchema;
|
|
1541
|
+
}
|
|
1542
|
+
|
|
1543
|
+
/** Build the `tools` array Anthropic expects, derived from CATALOG +
|
|
1544
|
+
* supportedToolNames(). Always returns the literal-typed shape Anthropic's
|
|
1545
|
+
* SDK demands (input_schema.type === 'object'). */
|
|
1546
|
+
export function anthropicToolDefinitions(): AnthropicToolDef[] {
|
|
1547
|
+
const supported = new Set(supportedToolNames());
|
|
1548
|
+
return CATALOG.tools
|
|
1549
|
+
.filter((t: CatalogTool) => supported.has(t.name))
|
|
1550
|
+
.map((t) => ({
|
|
1551
|
+
name: t.name,
|
|
1552
|
+
description: t.description,
|
|
1553
|
+
input_schema: ensureObjectSchema(t),
|
|
1554
|
+
}));
|
|
1555
|
+
}
|
|
1556
|
+
|
|
1557
|
+
/** Anthropic requires every tool's input_schema.type === 'object'. Some catalog
|
|
1558
|
+
* schemas are missing `properties` — fill in a minimal one from paramsFor(). */
|
|
1559
|
+
function ensureObjectSchema(tool: CatalogTool): AnthropicInputSchema {
|
|
1560
|
+
const raw = tool.inputSchema as { type?: string; properties?: Record<string, { type?: string; description?: string }>; required?: string[] } | undefined;
|
|
1561
|
+
if (raw && raw.type === 'object' && raw.properties && Object.keys(raw.properties).length > 0) {
|
|
1562
|
+
const properties: AnthropicInputSchema['properties'] = {};
|
|
1563
|
+
for (const [k, v] of Object.entries(raw.properties)) {
|
|
1564
|
+
properties[k] = { type: typeof v?.type === 'string' ? v.type : 'string', ...(v?.description ? { description: v.description } : {}) };
|
|
1565
|
+
}
|
|
1566
|
+
return {
|
|
1567
|
+
type: 'object',
|
|
1568
|
+
properties,
|
|
1569
|
+
...(Array.isArray(raw.required) && raw.required.length > 0 ? { required: raw.required } : {}),
|
|
1570
|
+
};
|
|
1571
|
+
}
|
|
1572
|
+
const params = paramsFor(tool);
|
|
1573
|
+
const properties: AnthropicInputSchema['properties'] = {};
|
|
1574
|
+
const required: string[] = [];
|
|
1575
|
+
for (const p of params) {
|
|
1576
|
+
properties[p.name] = { type: jsonSchemaType(p.type), ...(p.description ? { description: p.description } : {}) };
|
|
1577
|
+
if (p.required) required.push(p.name);
|
|
1578
|
+
}
|
|
1579
|
+
return { type: 'object', properties, ...(required.length > 0 ? { required } : {}) };
|
|
1580
|
+
}
|
|
1581
|
+
|
|
1582
|
+
function jsonSchemaType(t: string): string {
|
|
1583
|
+
if (t.startsWith('integer')) return 'integer';
|
|
1584
|
+
if (t.startsWith('number')) return 'number';
|
|
1585
|
+
if (t.startsWith('boolean')) return 'boolean';
|
|
1586
|
+
if (t.endsWith('[]') || t.startsWith('Array<')) return 'array';
|
|
1587
|
+
if (t.startsWith('{') || t.startsWith('object')) return 'object';
|
|
1588
|
+
return 'string';
|
|
1589
|
+
}
|
|
1590
|
+
|
|
1591
|
+
/**
|
|
1592
|
+
* Run a single tool call against the loaded model. Mirrors the wire-format
|
|
1593
|
+
* shape of an MCP tools/call result so the chat panel renderer doesn’t have
|
|
1594
|
+
* to know the dispatcher is local.
|
|
1595
|
+
*
|
|
1596
|
+
* The optional `ctx` carries the live viewer controller; tools that touch
|
|
1597
|
+
* the inline 3D panel (viewer_*) require it. When a non-viewer tool is
|
|
1598
|
+
* called the context is harmlessly ignored.
|
|
1599
|
+
*/
|
|
1600
|
+
export async function dispatch(
|
|
1601
|
+
model: LoadedPlaygroundModel,
|
|
1602
|
+
toolName: string,
|
|
1603
|
+
args: Record<string, unknown>,
|
|
1604
|
+
ctx: DispatchContext = {},
|
|
1605
|
+
): Promise<ToolDispatchResult> {
|
|
1606
|
+
const tool = CATALOG.tools.find((t) => t.name === toolName);
|
|
1607
|
+
if (!tool) {
|
|
1608
|
+
return {
|
|
1609
|
+
text: `Unknown tool: ${toolName}`,
|
|
1610
|
+
structured: null,
|
|
1611
|
+
isError: true,
|
|
1612
|
+
errorCode: ToolErrorCode.INVALID_INPUT,
|
|
1613
|
+
};
|
|
1614
|
+
}
|
|
1615
|
+
// The v2 surface includes mutate, BCF, IDS, export, diff. Anything with
|
|
1616
|
+
// an entry in IMPLS is wired client-side; the catalogue still includes
|
|
1617
|
+
// a few v0.2 / v0.5 entries (export_glb, export_ifcx, export_pdf_report)
|
|
1618
|
+
// that aren't implemented yet — those fall through to the
|
|
1619
|
+
// UNSUPPORTED_OPERATION branch below.
|
|
1620
|
+
const impl = IMPLS[toolName];
|
|
1621
|
+
if (!impl) {
|
|
1622
|
+
return {
|
|
1623
|
+
text: `${toolName} isn’t implemented in the web playground yet. (See the catalogue for the full surface — the stdio MCP supports it.)`,
|
|
1624
|
+
structured: { code: ToolErrorCode.UNSUPPORTED_OPERATION },
|
|
1625
|
+
isError: true,
|
|
1626
|
+
errorCode: ToolErrorCode.UNSUPPORTED_OPERATION,
|
|
1627
|
+
};
|
|
1628
|
+
}
|
|
1629
|
+
try {
|
|
1630
|
+
const out = await impl(model, args, ctx);
|
|
1631
|
+
return { text: out.text, structured: out.structured, isError: false, download: out.download };
|
|
1632
|
+
} catch (err) {
|
|
1633
|
+
if (err instanceof ToolExecutionError) {
|
|
1634
|
+
return {
|
|
1635
|
+
text: err.message,
|
|
1636
|
+
structured: err.details ?? null,
|
|
1637
|
+
isError: true,
|
|
1638
|
+
errorCode: err.code,
|
|
1639
|
+
hint: err.hint,
|
|
1640
|
+
};
|
|
1641
|
+
}
|
|
1642
|
+
return {
|
|
1643
|
+
text: err instanceof Error ? err.message : String(err),
|
|
1644
|
+
structured: null,
|
|
1645
|
+
isError: true,
|
|
1646
|
+
errorCode: ToolErrorCode.INTERNAL_ERROR,
|
|
1647
|
+
};
|
|
1648
|
+
}
|
|
1649
|
+
}
|