@ifc-lite/viewer 1.17.4 → 1.17.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +16 -16
- package/.turbo/turbo-typecheck.log +1 -1
- package/CHANGELOG.md +117 -0
- package/DESKTOP_CONTRACT_VERSION +1 -1
- package/dist/assets/{basketViewActivator-BmnNtVfZ.js → basketViewActivator-86rgogji.js} +1 -1
- package/dist/assets/drawing-2d-DoxKMqbO.js +257 -0
- package/dist/assets/{exporters-ChAtBmlj.js → exporters-CcPS9MK5.js} +2274 -2227
- package/dist/assets/{geometry.worker-BQ0rzNo-.js → geometry.worker-BFUYA08u.js} +1 -1
- package/dist/assets/ids-DQ5jY0E8.js +1 -0
- package/dist/assets/ifc-lite_bg-BINvzoCP.wasm +0 -0
- package/dist/assets/{index-Co8E2-FE.js → index-Bfms9I4A.js} +35160 -33084
- package/dist/assets/index-_bfZsDCC.css +1 -0
- package/dist/assets/{native-bridge-BRvbckFQ.js → native-bridge-DUyLCMZS.js} +104 -104
- package/dist/assets/{sandbox-DZiNLNMk.js → sandbox-C8575tul.js} +4340 -4322
- package/dist/assets/{server-client-BV8zHZ7Y.js → server-client-BuZK7OST.js} +1 -1
- package/dist/assets/{wasm-bridge-g01g7T9b.js → wasm-bridge-JsqEGDV8.js} +1 -1
- package/dist/index.html +8 -7
- package/index.html +1 -0
- package/package.json +7 -7
- package/src/App.tsx +16 -2
- package/src/components/viewer/CesiumOverlay.tsx +62 -19
- package/src/components/viewer/ChatPanel.tsx +195 -91
- package/src/components/viewer/MainToolbar.tsx +4 -3
- package/src/components/viewer/PropertiesPanel.tsx +16 -2
- package/src/components/viewer/SettingsPage.tsx +252 -101
- package/src/components/viewer/ThemeSwitch.tsx +63 -7
- package/src/components/viewer/ViewerLayout.tsx +1 -0
- package/src/components/viewer/Viewport.tsx +14 -2
- package/src/components/viewer/ViewportContainer.tsx +49 -64
- package/src/components/viewer/ViewportOverlays.tsx +5 -2
- package/src/components/viewer/bcf/BCFTopicDetail.tsx +4 -4
- package/src/components/viewer/chat/ModelSelector.tsx +90 -54
- package/src/components/viewer/properties/GeoreferencingPanel.tsx +113 -51
- package/src/components/viewer/properties/LocationMap.tsx +9 -7
- package/src/components/viewer/properties/ModelMetadataPanel.tsx +1 -1
- package/src/components/viewer/tools/SectionCapControls.tsx +237 -0
- package/src/components/viewer/tools/SectionPanel.tsx +39 -18
- package/src/components/viewer/useAnimationLoop.ts +9 -1
- package/src/components/viewer/useRenderUpdates.ts +1 -1
- package/src/hooks/ids/idsDataAccessor.ts +60 -24
- package/src/hooks/ingest/viewerModelIngest.ts +7 -2
- package/src/hooks/useIfcFederation.ts +326 -71
- package/src/hooks/useIfcLoader.ts +1 -0
- package/src/hooks/useViewControls.ts +13 -5
- package/src/index.css +484 -10
- package/src/lib/desktop-entitlement.ts +2 -4
- package/src/lib/geo/cesium-bridge.ts +15 -7
- package/src/lib/geo/effective-georef.test.ts +73 -0
- package/src/lib/geo/effective-georef.ts +111 -0
- package/src/lib/geo/reproject.ts +105 -19
- package/src/lib/llm/byok-guard.test.ts +77 -0
- package/src/lib/llm/byok-guard.ts +39 -0
- package/src/lib/llm/free-models.test.ts +0 -6
- package/src/lib/llm/models.ts +104 -42
- package/src/lib/llm/stream-client.ts +74 -110
- package/src/lib/llm/stream-direct.test.ts +130 -0
- package/src/lib/llm/stream-direct.ts +316 -0
- package/src/lib/llm/types.ts +14 -2
- package/src/main.tsx +1 -10
- package/src/services/api-keys.ts +73 -0
- package/src/store/constants.ts +20 -2
- package/src/store/index.ts +12 -5
- package/src/store/slices/cesiumSlice.ts +5 -0
- package/src/store/slices/chatSlice.test.ts +6 -76
- package/src/store/slices/chatSlice.ts +17 -58
- package/src/store/slices/sectionSlice.test.ts +87 -7
- package/src/store/slices/sectionSlice.ts +151 -5
- package/src/store/slices/uiSlice.ts +28 -5
- package/src/store/types.ts +26 -0
- package/src/utils/nativeSpatialDataStore.ts +4 -1
- package/src/utils/viewportUtils.ts +7 -2
- package/src/vite-env.d.ts +0 -4
- package/dist/assets/drawing-2d-gWfpdfYe.js +0 -257
- package/dist/assets/ids-B4jTqB1O.js +0 -1
- package/dist/assets/ifc-lite_bg-BX4E7TX8.wasm +0 -0
- package/dist/assets/index-DckuDqlv.css +0 -1
- package/src/components/viewer/UpgradePage.tsx +0 -71
- package/src/lib/desktop/ClerkDesktopEntitlementSync.tsx +0 -175
- package/src/lib/llm/ClerkChatSync.tsx +0 -74
- package/src/lib/llm/clerk-auth.ts +0 -62
package/src/lib/llm/stream-direct.ts
ADDED
@@ -0,0 +1,316 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+/**
+ * Direct browser-to-provider streaming for BYOK (Bring Your Own Key) models.
+ *
+ * Anthropic: Uses the official @anthropic-ai/sdk with `dangerouslyAllowBrowser`.
+ * OpenAI: Uses fetch against the OpenAI chat completions API (same SSE format
+ * the proxy already returns, so SSE parsing is shared).
+ *
+ * Keys are stored in localStorage and sent directly to the provider.
+ * They never pass through our server.
+ */
+
+import Anthropic from '@anthropic-ai/sdk';
+import { readSseStream, type StreamMessage, type StreamOptions } from './stream-client.js';
+
+const STREAM_REQUEST_TIMEOUT_MS = 45_000;
+
+// ── Anthropic ──────────────────────────────────────────────────────────────
+
+type AnthropicMediaType = 'image/png' | 'image/jpeg' | 'image/gif' | 'image/webp';
+
+type AnthropicContentBlock =
+  | { type: 'text'; text: string }
+  | { type: 'image'; source: { type: 'base64'; media_type: AnthropicMediaType; data: string } };
+
+function toAnthropicMessages(
+  messages: StreamMessage[],
+): Array<{ role: 'user' | 'assistant'; content: string | AnthropicContentBlock[] }> {
+  return messages
+    .filter((m) => m.role === 'user' || m.role === 'assistant')
+    .map((m) => {
+      if (typeof m.content === 'string') {
+        return { role: m.role as 'user' | 'assistant', content: m.content };
+      }
+      // Multimodal content — convert OpenAI-style parts to Anthropic format
+      const blocks: AnthropicContentBlock[] = m.content.map((part) => {
+        if (part.type === 'text') {
+          return { type: 'text' as const, text: part.text };
+        }
+        // image_url → Anthropic image block
+        const dataUrl = part.image_url.url;
+        const match = dataUrl.match(/^data:(image\/[^;]+);base64,(.+)$/);
+        if (match) {
+          return {
+            type: 'image' as const,
+            source: {
+              type: 'base64' as const,
+              media_type: match[1] as AnthropicMediaType,
+              data: match[2],
+            },
+          };
+        }
+        // Fallback: pass URL as text
+        return { type: 'text' as const, text: `[Image: ${dataUrl.slice(0, 100)}]` };
+      });
+      return { role: m.role as 'user' | 'assistant', content: blocks };
+    });
+}
+
+export async function streamAnthropicChat(
+  apiKey: string,
+  options: Omit<StreamOptions, 'proxyUrl' | 'authToken' | 'onUsageInfo'>,
+): Promise<void> {
+  const { model, messages, system, signal, onChunk, onComplete, onError, onFinishReason } = options;
+
+  const client = new Anthropic({
+    apiKey,
+    dangerouslyAllowBrowser: true,
+  });
+
+  let fullText = '';
+  try {
+    const stream = client.messages.stream({
+      model,
+      max_tokens: 8192,
+      temperature: 0.3,
+      system: system || undefined,
+      messages: toAnthropicMessages(messages),
+    });
+
+    // Wire up abort signal
+    if (signal) {
+      const onAbort = () => stream.abort();
+      signal.addEventListener('abort', onAbort, { once: true });
+      stream.on('end', () => signal.removeEventListener('abort', onAbort));
+    }
+
+    stream.on('text', (text) => {
+      fullText += text;
+      onChunk(text);
+    });
+
+    const finalMessage = await stream.finalMessage();
+
+    if (signal?.aborted) return;
+
+    const stopReason = finalMessage.stop_reason;
+    onFinishReason?.(stopReason === 'end_turn' ? 'stop' : stopReason);
+    onComplete(fullText);
+  } catch (err) {
+    if (signal?.aborted) return;
+
+    if (err instanceof Anthropic.APIError) {
+      const msg = err.status === 401
+        ? 'Invalid Anthropic API key. Check your key in Settings.'
+        : err.status === 429
+          ? 'Anthropic rate limit reached. Please wait and try again.'
+          : `Anthropic error (${err.status}): ${err.message}`;
+      onError(new Error(msg));
+    } else {
+      onError(err instanceof Error ? err : new Error(String(err)));
+    }
+  }
+}
+
+// ── OpenAI ─────────────────────────────────────────────────────────────────
+
+import { getModelById } from './models.js';
+
+/**
+ * Stream an OpenAI model. Automatically picks the right API:
+ * - Chat Completions (`/v1/chat/completions`) for standard chat models
+ * - Responses (`/v1/responses`) for Codex-style models
+ */
+export async function streamOpenAiChat(
+  apiKey: string,
+  options: Omit<StreamOptions, 'proxyUrl' | 'authToken' | 'onUsageInfo'>,
+): Promise<void> {
+  const modelDef = getModelById(options.model);
+  if (modelDef?.openaiApi === 'responses') {
+    return streamOpenAiResponses(apiKey, options);
+  }
+  return streamOpenAiChatCompletions(apiKey, options);
+}
+
+/** Standard Chat Completions API (GPT-5.4, GPT-5.4 Mini, etc.) */
+async function streamOpenAiChatCompletions(
+  apiKey: string,
+  options: Omit<StreamOptions, 'proxyUrl' | 'authToken' | 'onUsageInfo'>,
+): Promise<void> {
+  const { model, messages, system, signal, onChunk, onComplete, onError, onFinishReason } = options;
+
+  const allMessages: StreamMessage[] = system
+    ? [{ role: 'system', content: system }, ...messages]
+    : [...messages];
+
+  const { response, cleanup } = await openAiFetch(
+    'https://api.openai.com/v1/chat/completions',
+    {
+      model,
+      messages: allMessages.map((m) => ({ role: m.role, content: m.content })),
+      stream: true,
+      temperature: 0.3,
+      max_completion_tokens: 8192,
+    },
+    apiKey,
+    signal,
+    onError,
+  );
+  if (!response) return;
+
+  if (!response.body) { cleanup(); onError(new Error('No response body')); return; }
+
+  let fullText = '';
+  let finishReason: string | null = null;
+
+  const ok = await readSseStream(response.body, signal, (data) => {
+    const parsed = JSON.parse(data) as {
+      choices?: Array<{ delta?: { content?: string }; finish_reason?: string | null }>;
+    };
+    const content = parsed.choices?.[0]?.delta?.content;
+    if (content) { fullText += content; onChunk(content); }
+    const fr = parsed.choices?.[0]?.finish_reason;
+    if (fr) finishReason = fr;
+  }, onError);
+
+  cleanup();
+  if (ok) { onFinishReason?.(finishReason); onComplete(fullText); }
+}
+
+/** Responses API for Codex-style models (GPT-5.3 Codex) */
+async function streamOpenAiResponses(
+  apiKey: string,
+  options: Omit<StreamOptions, 'proxyUrl' | 'authToken' | 'onUsageInfo'>,
+): Promise<void> {
+  const { model, messages, system, signal, onChunk, onComplete, onError, onFinishReason } = options;
+
+  // Build the input array: system instructions + conversation
+  const input: Array<{ role: string; content: string | unknown[] }> = [];
+  if (system) {
+    input.push({ role: 'developer', content: system });
+  }
+  for (const m of messages) {
+    input.push({ role: m.role, content: m.content });
+  }
+
+  const { response, cleanup } = await openAiFetch(
+    'https://api.openai.com/v1/responses',
+    {
+      model,
+      input,
+      stream: true,
+      max_output_tokens: 8192,
+    },
+    apiKey,
+    signal,
+    onError,
+  );
+  if (!response) return;
+
+  if (!response.body) { cleanup(); onError(new Error('No response body')); return; }
+
+  let fullText = '';
+  // Map Responses API terminal events → chat-style finish_reason.
+  // `response.incomplete` is any non-completed terminal state: when the
+  // reason is `max_output_tokens` — or simply absent — map to 'length' so
+  // the ChatPanel "Continue" UX can resume a truncated Codex reply. Other
+  // explicit reasons (e.g. `content_filter`) pass through unchanged.
+  let finishReason: string | null = 'stop';
+
+  const ok = await readSseStream(response.body, signal, (data) => {
+    const event = JSON.parse(data) as {
+      type?: string;
+      delta?: string;
+      response?: {
+        status?: string;
+        incomplete_details?: { reason?: string } | null;
+      };
+    };
+    if (event.type === 'response.output_text.delta' && event.delta) {
+      fullText += event.delta;
+      onChunk(event.delta);
+    } else if (event.type === 'response.incomplete') {
+      const reason = event.response?.incomplete_details?.reason;
+      finishReason = reason == null || reason === 'max_output_tokens' ? 'length' : reason;
+    } else if (event.type === 'response.completed') {
+      finishReason = 'stop';
+    }
+  }, onError);
+
+  cleanup();
+  if (ok) { onFinishReason?.(finishReason); onComplete(fullText); }
+}
+
+// ── Shared helpers ─────────────────────────────────────────────────────────
+
+async function openAiFetch(
+  url: string,
+  body: Record<string, unknown>,
+  apiKey: string,
+  signal: AbortSignal | undefined,
+  onError: (err: Error) => void,
+): Promise<{ response: Response | null; cleanup: () => void }> {
+  const controller = new AbortController();
+  const timeoutId = setTimeout(
+    () => controller.abort(new Error('Chat request timed out. Please try again.')),
+    STREAM_REQUEST_TIMEOUT_MS,
+  );
+  const abortFromParent = () => controller.abort(signal?.reason);
+  if (signal) {
+    if (signal.aborted) { clearTimeout(timeoutId); return { response: null, cleanup: () => {} }; }
+    signal.addEventListener('abort', abortFromParent, { once: true });
+  }
+
+  // cleanup() clears the connect timeout and removes the abort listener.
+  // Callers must call it AFTER streaming completes, not before — otherwise
+  // user cancellation during SSE consumption won't abort the fetch.
+  const cleanup = () => {
+    clearTimeout(timeoutId);
+    signal?.removeEventListener('abort', abortFromParent);
+  };
+
+  let response: Response;
+  try {
+    response = await fetch(url, {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${apiKey}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify(body),
+      signal: controller.signal,
+    });
+  } catch (err) {
+    cleanup();
+    if (signal?.aborted) return { response: null, cleanup: () => {} };
+    if (controller.signal.aborted && controller.signal.reason instanceof Error) {
+      onError(controller.signal.reason);
+    } else {
+      onError(err instanceof Error ? err : new Error(String(err)));
+    }
+    return { response: null, cleanup: () => {} };
+  }
+
+  if (!response.ok) {
+    cleanup();
+    let detail = `OpenAI error (${response.status})`;
+    try {
+      const errBody = (await response.json()) as { error?: { message?: string } };
+      if (response.status === 401) {
+        detail = 'Invalid OpenAI API key. Check your key in the chat panel.';
+      } else if (response.status === 429) {
+        detail = 'OpenAI rate limit reached. Please wait and try again.';
+      } else if (errBody.error?.message) {
+        detail = `OpenAI: ${errBody.error.message}`;
+      }
+    } catch { /* ignore parse failure */ }
+    onError(new Error(detail));
+    return { response: null, cleanup: () => {} };
+  }
+
+  return { response, cleanup };
+}
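For orientation, a minimal sketch of how a caller might drive the new direct-streaming entry point. The option shape (model, messages, system, signal, and the onChunk/onComplete/onError/onFinishReason callbacks) is taken from the hunk above; the model id, the prompt wiring, and the getApiKeys() lookup are illustrative rather than code from this package.

import { streamAnthropicChat } from './stream-direct.js';
import { getApiKeys } from '../../services/api-keys.js';

// Hypothetical call site: stream one BYOK reply, logging chunks as they arrive.
async function askDirect(prompt: string): Promise<void> {
  const { anthropicKey } = getApiKeys();
  const controller = new AbortController(); // controller.abort() cancels the stream

  await streamAnthropicChat(anthropicKey, {
    model: 'claude-sonnet-4-5', // placeholder id; real entries live in models.ts
    system: 'You are a helpful BIM assistant.',
    messages: [{ role: 'user', content: prompt }],
    signal: controller.signal,
    onChunk: (text) => console.log(text),
    onComplete: (full) => console.log(`[done: ${full.length} chars]`),
    onError: (err) => console.error(err.message),
    onFinishReason: (reason) => console.log(`[finish_reason: ${reason}]`),
  });
}

Note that failures surface through onError rather than a rejected promise, so awaiting the call alone is not enough to detect them.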
package/src/lib/llm/types.ts
CHANGED
@@ -126,7 +126,15 @@ export interface FileAttachment {
   isImage?: boolean;
 }
 
-export type ModelTier = 'free' | 'pro';
+export type ModelTier = 'free' | 'byok';
+
+/**
+ * Where requests for this model are routed.
+ * - 'proxy': through the server-side proxy (free models)
+ * - 'anthropic': direct browser-to-Anthropic API (user's own key)
+ * - 'openai': direct browser-to-OpenAI API (user's own key)
+ */
+export type ModelSource = 'proxy' | 'anthropic' | 'openai';
 
 /** Relative cost indicator for paid models */
 export type ModelCost = '$' | '$$' | '$$$';
@@ -136,6 +144,8 @@ export interface LLMModel {
   name: string;
   provider: string;
   tier: ModelTier;
+  /** Where requests are routed — proxy (free) or direct to provider (BYOK) */
+  source: ModelSource;
   contextWindow: number;
   /** Whether this model accepts image inputs in chat content */
   supportsImages: boolean;
@@ -143,8 +153,10 @@ export interface LLMModel {
   supportsFileAttachments: boolean;
   /** Notes shown in model selector */
   notes?: string;
-  /** Relative cost indicator (pro models only) */
+  /** Relative cost indicator (BYOK models only) */
   cost?: ModelCost;
+  /** OpenAI API variant: 'chat' (default) or 'responses' (Codex-style models) */
+  openaiApi?: 'chat' | 'responses';
 }
 
 export type ChatStatus = 'idle' | 'sending' | 'streaming' | 'error';
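To make the new fields concrete, here is a hypothetical BYOK entry as it might appear in models.ts. The field names come from the interface above and the 'GPT-5.3 Codex' naming from the stream-direct.ts comments; the id, context window, and cost values are placeholders.

import type { LLMModel } from './types.js';

const codexExample: LLMModel = {
  id: 'gpt-5.3-codex', // placeholder id
  name: 'GPT-5.3 Codex',
  provider: 'OpenAI',
  tier: 'byok', // gates the model behind a stored user key
  source: 'openai', // requests go straight to api.openai.com, not the proxy
  contextWindow: 200_000, // placeholder
  supportsImages: false,
  supportsFileAttachments: true,
  cost: '$$', // placeholder
  openaiApi: 'responses', // streamOpenAiChat() will route this to /v1/responses
};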
package/src/main.tsx
CHANGED
@@ -8,21 +8,12 @@
 
 import React from 'react';
 import ReactDOM from 'react-dom/client';
-import { ClerkProvider } from '@clerk/clerk-react';
 import { App } from './App';
 import './index.css';
 import 'maplibre-gl/dist/maplibre-gl.css';
 
-const clerkPublishableKey = (import.meta.env.VITE_CLERK_PUBLISHABLE_KEY as string | undefined)?.trim();
-
 ReactDOM.createRoot(document.getElementById('root')!).render(
   <React.StrictMode>
-    {clerkPublishableKey ? (
-      <ClerkProvider publishableKey={clerkPublishableKey}>
-        <App />
-      </ClerkProvider>
-    ) : (
-      <App />
-    )}
+    <App />
   </React.StrictMode>
 );
package/src/services/api-keys.ts
ADDED
@@ -0,0 +1,73 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+/**
+ * BYOK (Bring Your Own Key) API key storage.
+ *
+ * Stores user-provided API keys for Anthropic and OpenAI in localStorage.
+ * Keys are sent directly from the browser to the provider APIs — they never
+ * pass through our server proxy.
+ */
+
+export interface ApiKeyConfig {
+  anthropicKey: string;
+  openaiKey: string;
+}
+
+const STORAGE_KEY = 'ifc-lite:api-keys:v1';
+const CHANGED_EVENT = 'ifc-lite:api-keys-changed';
+
+const EMPTY_CONFIG: ApiKeyConfig = {
+  anthropicKey: '',
+  openaiKey: '',
+};
+
+function sanitize(value: unknown): ApiKeyConfig {
+  const parsed = value && typeof value === 'object' ? (value as Partial<ApiKeyConfig>) : {};
+  return {
+    anthropicKey: typeof parsed.anthropicKey === 'string' ? parsed.anthropicKey.trim() : '',
+    openaiKey: typeof parsed.openaiKey === 'string' ? parsed.openaiKey.trim() : '',
+  };
+}
+
+export function getApiKeys(): ApiKeyConfig {
+  try {
+    return sanitize(JSON.parse(localStorage.getItem(STORAGE_KEY) ?? '{}'));
+  } catch {
+    return { ...EMPTY_CONFIG };
+  }
+}
+
+export function updateApiKeys(updates: Partial<ApiKeyConfig>): ApiKeyConfig {
+  const next = { ...getApiKeys(), ...updates };
+  // Trim keys before saving
+  next.anthropicKey = next.anthropicKey.trim();
+  next.openaiKey = next.openaiKey.trim();
+  localStorage.setItem(STORAGE_KEY, JSON.stringify(next));
+  window.dispatchEvent(new Event(CHANGED_EVENT));
+  return next;
+}
+
+export function clearApiKeys(): ApiKeyConfig {
+  localStorage.removeItem(STORAGE_KEY);
+  window.dispatchEvent(new Event(CHANGED_EVENT));
+  return { ...EMPTY_CONFIG };
+}
+
+export function subscribeApiKeys(listener: () => void): () => void {
+  window.addEventListener(CHANGED_EVENT, listener);
+  return () => window.removeEventListener(CHANGED_EVENT, listener);
+}
+
+export function hasAnthropicKey(): boolean {
+  return getApiKeys().anthropicKey.length > 0;
+}
+
+export function hasOpenaiKey(): boolean {
+  return getApiKeys().openaiKey.length > 0;
+}
+
+export function hasAnyApiKey(): boolean {
+  return hasAnthropicKey() || hasOpenaiKey();
+}
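A short usage sketch for the new module; the function names are real (from the hunk above), while the key value is a placeholder.

import { updateApiKeys, subscribeApiKeys, hasAnyApiKey, clearApiKeys } from './api-keys.js';

// Persist a key typed into Settings; same-tab listeners fire synchronously.
updateApiKeys({ openaiKey: 'sk-placeholder' });

const unsubscribe = subscribeApiKeys(() => {
  console.log('BYOK models available:', hasAnyApiKey());
});

clearApiKeys(); // listener logs "BYOK models available: false"
unsubscribe(); // e.g. when the settings panel unmounts

One design consequence worth noting: the change event is dispatched on window rather than relying on the storage event, so subscribers are only notified within the same tab; another tab simply sees the new keys on its next getApiKeys() read.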
package/src/store/constants.ts
CHANGED
@@ -30,6 +30,24 @@ export const SECTION_PLANE_DEFAULTS = {
   ENABLED: true,
   /** Default flipped state */
   FLIPPED: false,
+  /** Default: render filled/hatched cap surfaces at the cut */
+  SHOW_CAP: true,
+  /** Default: draw polygon outlines on the cut surfaces */
+  SHOW_OUTLINES: true,
+} as const;
+
+/**
+ * Default cut-surface appearance. RGBA tuples are 0-1 per channel. Screen-space
+ * hatch settings are in pixels so the hatch stays readable at any zoom level.
+ */
+export const SECTION_CAP_DEFAULTS = {
+  FILL_COLOR: [0.92, 0.88, 0.78, 1.0] as [number, number, number, number], // warm paper
+  STROKE_COLOR: [0.10, 0.10, 0.10, 1.0] as [number, number, number, number], // ink
+  PATTERN: 'diagonal' as const,
+  SPACING_PX: 8,
+  ANGLE_RAD: Math.PI / 4,
+  WIDTH_PX: 1.0,
+  SECONDARY_ANGLE_RAD: -Math.PI / 4,
 } as const;
 
 // ============================================================================
@@ -52,10 +70,10 @@ export const EDGE_LOCK_DEFAULTS = {
 // ============================================================================
 
 /** Resolve the initial theme: localStorage override > system preference > dark fallback */
-function getInitialTheme(): 'light' | 'dark' {
+function getInitialTheme(): 'light' | 'dark' | 'colorful' {
   if (typeof window === 'undefined') return 'dark';
   const saved = localStorage.getItem('ifc-lite-theme');
-  if (saved === 'light' || saved === 'dark') return saved;
+  if (saved === 'light' || saved === 'dark' || saved === 'colorful') return saved;
   return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light';
 }
 
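The screen-space hatch parameters are easiest to read with the stripe test written out. This is a CPU-side sketch of the idea only; the actual cap renderer presumably evaluates something like it per fragment in a shader.

import { SECTION_CAP_DEFAULTS } from './constants.js';

// A pixel (x, y) sits on a hatch stripe when its signed distance measured
// across the stripe direction, taken modulo SPACING_PX, is under WIDTH_PX.
function onHatchStripe(x: number, y: number, angleRad: number): boolean {
  const { SPACING_PX, WIDTH_PX } = SECTION_CAP_DEFAULTS;
  const across = x * Math.sin(angleRad) - y * Math.cos(angleRad);
  const m = ((across % SPACING_PX) + SPACING_PX) % SPACING_PX; // positive modulo
  return m < WIDTH_PX;
}

// The 'diagonal' pattern would use ANGLE_RAD alone; a crosshatch variant would
// also test SECONDARY_ANGLE_RAD and OR the two results.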
package/src/store/index.ts
CHANGED
@@ -43,7 +43,7 @@ import { CAMERA_DEFAULTS, SECTION_PLANE_DEFAULTS, UI_DEFAULTS, TYPE_VISIBILITY_D
 export type * from './types.js';
 
 // Explicitly re-export multi-model types that need to be imported by name
-export type { EntityRef, SchemaVersion, FederatedModel, MeasurementConstraintEdge, OrthogonalAxis } from './types.js';
+export type { EntityRef, SchemaVersion, FederatedModel, MeasurementConstraintEdge, OrthogonalAxis, SectionCapStyle, SectionCapHatchId, SectionPlane, SectionPlaneAxis } from './types.js';
 
 // Re-export utility functions for entity references
 export { entityRefToString, stringToEntityRef, entityRefEquals, isIfcxDataStore } from './types.js';
@@ -195,12 +195,18 @@ const createViewerStore = () => create<ViewerState>()((...args) => ({
     cornerValence: 0,
   },
 
-  // Section plane
+  // Section plane: reset axis/position/enabled/flipped (those are
+  // model-relative and meaningless when switching files), but PRESERVE
+  // the user's cap appearance preferences (showCap, showOutlines,
+  // capStyle). Those round-trip to localStorage via the slice's
+  // persistence helpers; clobbering them here was the cause of "my
+  // hatch / colour resets to defaults every time I open a file".
   sectionPlane: {
-
+    ...get().sectionPlane,
+    axis: SECTION_PLANE_DEFAULTS.AXIS,
     position: SECTION_PLANE_DEFAULTS.POSITION,
-    enabled:
-    flipped:
+    enabled: SECTION_PLANE_DEFAULTS.ENABLED,
+    flipped: SECTION_PLANE_DEFAULTS.FLIPPED,
   },
 
   // Camera
@@ -224,6 +230,7 @@ const createViewerStore = () => create<ViewerState>()((...args) => ({
   separationLinesRadius: UI_DEFAULTS.SEPARATION_LINES_RADIUS,
 
   // Cesium
+  cesiumAvailable: false,
   cesiumEnabled: false,
   cesiumTerrainHeight: null,
   cesiumTerrainClamp: false,
package/src/store/slices/cesiumSlice.ts
CHANGED
@@ -23,6 +23,8 @@ export type CesiumDataSource
 
 export interface CesiumSlice {
   // State
+  /** Whether a loaded model (or user mutations) provide enough georeferencing to place in Cesium. */
+  cesiumAvailable: boolean;
   cesiumEnabled: boolean;
   cesiumDataSource: CesiumDataSource;
   /** Resolved Cesium ion access token (user override or build-time default). */
@@ -41,6 +43,7 @@ export interface CesiumSlice {
   cesiumGlbLoaded: boolean;
 
   // Actions
+  setCesiumAvailable: (available: boolean) => void;
   setCesiumEnabled: (enabled: boolean) => void;
   toggleCesium: () => void;
   setCesiumDataSource: (source: CesiumDataSource) => void;
@@ -93,6 +96,7 @@ function resolveIonToken(): string {
 }
 
 export const createCesiumSlice: StateCreator<CesiumSlice, [], [], CesiumSlice> = (set) => ({
+  cesiumAvailable: false,
   cesiumEnabled: false,
   cesiumDataSource: loadDataSource(),
   cesiumIonToken: resolveIonToken(),
@@ -103,6 +107,7 @@ export const createCesiumSlice: StateCreator<CesiumSlice, [], [], CesiumSlice> =
   cesiumTerrainClipY: null,
   cesiumGlbLoaded: false,
 
+  setCesiumAvailable: (available) => set({ cesiumAvailable: available }),
   setCesiumEnabled: (enabled) => set({ cesiumEnabled: enabled }),
   toggleCesium: () => set((s) => ({ cesiumEnabled: !s.cesiumEnabled })),
   setCesiumDataSource: (source) => {
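A sketch of the intended flow for the new flag; the two slice members are real, but the store export name and the caller are assumptions made for illustration.

// Assumed store handle — the combined store's actual export name is not
// visible in this diff.
declare const useViewerStore: {
  getState(): {
    cesiumAvailable: boolean;
    setCesiumAvailable: (available: boolean) => void;
  };
};

// Hypothetical ingest hook: once georeferencing is resolved for a loaded
// model, advertise Cesium availability so the UI can enable the globe toggle.
function onGeorefResolved(hasUsableGeoref: boolean): void {
  useViewerStore.getState().setCesiumAvailable(hasUsableGeoref);
}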
package/src/store/slices/chatSlice.test.ts
CHANGED
@@ -8,7 +8,7 @@ import { buildErrorFeedbackContent } from './chatSlice.js';
 import { create } from 'zustand';
 import { createChatSlice, type ChatSlice } from './chatSlice.js';
 import { createPatchDiagnostic, createPreflightDiagnostic } from '../../lib/llm/script-diagnostics.js';
-import { DEFAULT_FREE_MODEL, DEFAULT_PRO_MODEL } from '../../lib/llm/models.js';
+import { DEFAULT_FREE_MODEL, DEFAULT_BYOK_MODEL } from '../../lib/llm/models.js';
 
 function withMockLocalStorage(fn: () => void) {
   const original = globalThis.localStorage;
@@ -217,52 +217,14 @@ test('clearChatMessages resets streaming state as well as persisted messages', (
   assert.deepEqual(useChatStore.getState().chatAttachments, []);
 });
 
-test('
-  withMockLocalStorage(() => {
-    globalThis.localStorage.setItem('ifc-lite-chat-model:user-a', DEFAULT_PRO_MODEL.id);
-    globalThis.localStorage.setItem('ifc-lite-chat-messages:user-a', JSON.stringify([
-      {
-        id: 'persisted-a',
-        role: 'user',
-        content: 'hello from A',
-        createdAt: 1,
-      },
-    ]));
-    globalThis.localStorage.setItem('ifc-lite-chat-model:user-b', DEFAULT_FREE_MODEL.id);
-    globalThis.localStorage.setItem('ifc-lite-chat-messages:user-b', JSON.stringify([
-      {
-        id: 'persisted-b',
-        role: 'assistant',
-        content: 'hello from B',
-        createdAt: 2,
-      },
-    ]));
-
-    const useChatStore = create<ChatSlice>()((...args) => createChatSlice(...args));
-    useChatStore.getState().switchChatUserContext('user-a', true, { restoreMessages: true });
-
-    assert.equal(useChatStore.getState().chatActiveModel, DEFAULT_PRO_MODEL.id);
-    assert.equal(useChatStore.getState().chatMessages[0]?.id, 'persisted-a');
-
-    useChatStore.getState().switchChatUserContext('user-b', false, {
-      clearPersistedCurrent: true,
-      restoreMessages: true,
-    });
-
-    assert.equal(useChatStore.getState().chatActiveModel, DEFAULT_FREE_MODEL.id);
-    assert.equal(useChatStore.getState().chatMessages[0]?.id, 'persisted-b');
-    assert.equal(globalThis.localStorage.getItem('ifc-lite-chat-messages:user-a'), null);
-  });
-});
-
-test('setChatHasPro falls back to a free model when entitlement is removed', () => {
+test('setChatHasByokKey falls back to a free model when keys are removed', () => {
   const useChatStore = create<ChatSlice>()((...args) => createChatSlice(...args));
-  useChatStore.getState().setChatHasPro(true);
-  useChatStore.getState().setChatActiveModel(DEFAULT_PRO_MODEL.id);
+  useChatStore.getState().setChatHasByokKey(true);
+  useChatStore.getState().setChatActiveModel(DEFAULT_BYOK_MODEL.id);
 
-  useChatStore.getState().setChatHasPro(false);
+  useChatStore.getState().setChatHasByokKey(false);
 
-  assert.equal(useChatStore.getState().chatHasPro, false);
+  assert.equal(useChatStore.getState().chatHasByokKey, false);
   assert.equal(useChatStore.getState().chatActiveModel, DEFAULT_FREE_MODEL.id);
 });
 
@@ -291,35 +253,3 @@ test('removeChatAttachment only removes the targeted attachment id', () => {
   );
 });
 
-test('switchChatUserContext ignores malformed persisted messages', () => {
-  withMockLocalStorage(() => {
-    globalThis.localStorage.setItem('ifc-lite-chat-messages:user-a', JSON.stringify([
-      {
-        id: 'valid',
-        role: 'user',
-        content: 'hello',
-        createdAt: 1,
-        attachments: [
-          { id: 'att-1', name: 'ok.csv', type: 'text/csv', size: 20, textContent: 'a,b\n1,2' },
-          { name: 'missing-id.csv', type: 'text/csv', size: 20 },
-        ],
-      },
-      {
-        id: 123,
-        role: 'assistant',
-        content: 'bad',
-        createdAt: 2,
-      },
-    ]));
-
-    const useChatStore = create<ChatSlice>()((...args) => createChatSlice(...args));
-    useChatStore.getState().switchChatUserContext('user-a', false, { restoreMessages: true });
-
-    assert.equal(useChatStore.getState().chatMessages.length, 1);
-    assert.equal(useChatStore.getState().chatMessages[0]?.id, 'valid');
-    assert.deepEqual(
-      useChatStore.getState().chatMessages[0]?.attachments?.map((attachment) => attachment.id),
-      ['att-1'],
-    );
-  });
-});
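For reference, a standalone sketch of the rule the surviving test pins down: dropping BYOK keys while a BYOK model is selected must snap the selection back to the default free model. getModelById and the model constants exist in lib/llm/models.ts; the three-field slice shape here is invented for the sketch and is far smaller than the real ChatSlice.

import { create, type StateCreator } from 'zustand';
import { DEFAULT_FREE_MODEL, getModelById } from '../../lib/llm/models.js';

interface ByokFallbackSlice {
  chatHasByokKey: boolean;
  chatActiveModel: string;
  setChatHasByokKey: (hasKey: boolean) => void;
}

const createByokFallbackSlice: StateCreator<ByokFallbackSlice> = (set) => ({
  chatHasByokKey: false,
  chatActiveModel: DEFAULT_FREE_MODEL.id,
  setChatHasByokKey: (hasKey) =>
    set((state) => ({
      chatHasByokKey: hasKey,
      // Losing the key invalidates any BYOK selection: fall back to free.
      chatActiveModel:
        !hasKey && getModelById(state.chatActiveModel)?.tier === 'byok'
          ? DEFAULT_FREE_MODEL.id
          : state.chatActiveModel,
    })),
});

export const useByokSketchStore = create<ByokFallbackSlice>()(createByokFallbackSlice);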