create-wirejs-app 2.0.168-llm → 2.0.170

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "create-wirejs-app",
3
- "version": "2.0.168-llm",
3
+ "version": "2.0.170",
4
4
  "description": "Initializes a wirejs package.",
5
5
  "author": "Jon Wire",
6
6
  "license": "MIT",
@@ -5,7 +5,7 @@ import {
5
5
  withContext,
6
6
  } from "wirejs-resources";
7
7
 
8
- const permanentAdmins = ['admin'];
8
+ const permanentAdmins = ['admin', 'iambipedal@gmail.com'];
9
9
 
10
10
  export type Setting = {
11
11
  key: string;
@@ -0,0 +1,63 @@
1
+ import {
2
+ AuthenticationApi,
3
+ BackgroundJob,
4
+ withContext
5
+ } from "wirejs-resources";
6
+ import { tooledHandler } from "./tooled-handler.js";
7
+ import { Infra } from "./infra.js";
8
+ export type { Chunk, ChunkData, Conversation, ConversationMessage, Role } from './types.js';
9
+
10
+ export const LLM = (auth: AuthenticationApi) => {
11
+ const infra = new Infra('app', 'llm', {
12
+ models: ['mistral-nemo', 'llama3.2', 'llama3:8b', 'llama2'],
13
+ systemPrompt: `You are a helpful assistant.`,
14
+ });
15
+
16
+ const chatRunner = new BackgroundJob('app', 'chatRunner', {
17
+ handler: tooledHandler(infra)
18
+ });
19
+
20
+ return withContext(context => ({
21
+ async send(room: string, message: string) {
22
+ const user = await auth.requireCurrentUser(context);
23
+ await infra.assertUserIsAuthorized(user, room);
24
+ if (!room || !message || !message.trim()) {
25
+ throw new Error('Room and message are required');
26
+ }
27
+ await chatRunner.start(room, message.trim());
28
+ },
29
+ async getRoom(room: string) {
30
+ const user = await auth.requireCurrentUser(context);
31
+ await infra.assertUserIsAuthorized(user, room);
32
+ return infra.getStream(context, room);
33
+ },
34
+ async getHistory(room: string) {
35
+ const user = await auth.requireCurrentUser(context);
36
+ await infra.assertUserIsAuthorized(user, room);
37
+ const history = await infra.getHistory(room);
38
+ return history.filter(m => [
39
+ 'assistant', 'user', 'step'
40
+ ].includes(m.role) && !!m.content);
41
+ },
42
+ async getMessage(room: string, mid: number) {
43
+ const user = await auth.requireCurrentUser(context);
44
+ await infra.assertUserIsAuthorized(user, room);
45
+ return infra.getMessage(room, mid);
46
+ },
47
+ async createRoom() {
48
+ const user = await auth.requireCurrentUser(context);
49
+ const conversation = await infra.createConversation(user);
50
+ return conversation.conversationId;
51
+ },
52
+ async getConversations() {
53
+ const user = await auth.requireCurrentUser(context);
54
+ return infra.listUserConversations(user);
55
+ },
56
+ async deleteConversation(conversationId: string) {
57
+ const user = await auth.requireCurrentUser(context);
58
+ await infra.assertUserIsAuthorized(user, conversationId);
59
+ await infra.deleteConversation(conversationId)
60
+ return { success: true };
61
+ }
62
+ }))
63
+ };
@@ -0,0 +1,322 @@
1
+ import { randomUUID } from 'crypto';
2
+ import {
3
+ AssistantMessage,
4
+ Context,
5
+ DistributedTable,
6
+ LLM as LLMService,
7
+ LLMChunk,
8
+ LLMMessage,
9
+ PassThruParser,
10
+ RealtimeService,
11
+ Resource,
12
+ Setting,
13
+ User,
14
+ ToolDefinition,
15
+ } from "wirejs-resources";
16
+ import { fromAsync, pad } from "./utils.js";
17
+ import { Chunk, ChunkData, Conversation, ConversationMessage, WorkflowStep } from "./types.js";
18
+
19
+ const DEFAULT_MODELS_LIST = ['mistral-nemo', 'llama3.2', 'llama3:8b', 'llama2'];
20
+
21
+ export type PromptOptions = {
22
+ systemPromptOverride?: string;
23
+ history: LLMMessage[];
24
+ tools?: ToolDefinition[];
25
+ } | {
26
+ systemPromptOverride?: string;
27
+ prompt: string;
28
+ tools?: ToolDefinition[];
29
+ };
30
+
31
+ export type RespondOptions = {
32
+ conversationId: string;
33
+ history: LLMMessage[];
34
+ systemPromptOverride?: string;
35
+ tools?: ToolDefinition[],
36
+ } | {
37
+ conversationId: string;
38
+ mid: number;
39
+ prompt: string;
40
+ systemPromptOverride?: string;
41
+ tools?: ToolDefinition[],
42
+ };
43
+
44
+ export type InfraOptions = {
45
+ models?: string[];
46
+ systemPrompt?: string;
47
+ tools?: ToolDefinition[],
48
+ }
49
+
50
+ export class Infra extends Resource {
51
+ private conversations: ReturnType<typeof makeConversationsTable>;
52
+ private messages: ReturnType<typeof makeMessagesTable>;
53
+ private realtime: ReturnType<typeof makeRealtimeService>;
54
+ private llm: ReturnType<typeof makeLLMService>;
55
+ private modelSetting: ReturnType<typeof makeModelsOverrideSetting>;
56
+ private tools?: ToolDefinition[];
57
+
58
+ constructor(scope: string | Resource, id: string, options?: InfraOptions) {
59
+ super(scope, id);
60
+ this.conversations = makeConversationsTable(this);
61
+ this.messages = makeMessagesTable(this);
62
+ this.realtime = makeRealtimeService(this);
63
+ this.llm = makeLLMService(this, options?.systemPrompt, options?.models);
64
+ this.modelSetting = makeModelsOverrideSetting(this, options?.models);
65
+ }
66
+
67
+ async prompt(options: PromptOptions): Promise<AssistantMessage> {
68
+ // TODO: debounce and/or redesign model settings relationship
69
+ const models = (await this.modelSetting.read()).split(',').map(s => s.trim());
70
+ const tools = options.tools ?? this.tools;
71
+
72
+ console.log('prompt', (options as any).prompt);
73
+
74
+ return this.llm.continueConversation({
75
+ systemPrompt: options.systemPromptOverride,
76
+ history: 'history' in options ? options.history : [{
77
+ role: 'user',
78
+ content: options.prompt
79
+ }],
80
+ ...(models.length > 0 ? { models } : {}),
81
+ ...(tools ? { tools } : {}),
82
+ });
83
+ }
84
+
85
+ async respond(options: RespondOptions): Promise<ConversationMessage & AssistantMessage> {
86
+ // TODO: debounce and/or redesign model settings relationship
87
+ const models = (await this.modelSetting.read()).split(',').map(s => s.trim());
88
+
89
+ const mid = 'mid' in options ? options.mid : options.history.length;
90
+
91
+ // responses are not streamed directly because they're unnecessarily frequent.
92
+ // we slow this down, send messages in batches to reduce unnecessary cost.
93
+ let seq = 0;
94
+ let batch: string[] = [];
95
+ let lastBatch = new Date().getTime();
96
+
97
+ const onChunk = options.conversationId ?
98
+ (async (chunk: LLMChunk) => {
99
+ if (chunk.message.role !== 'assistant') return;
100
+ if ((chunk.message.tool_calls?.length ?? 0) > 0) return;
101
+ if (chunk.message.content === '') return;
102
+ batch.push(chunk.message.content);
103
+ if (new Date().getTime() - lastBatch > 150) {
104
+ const text = batch.join('');
105
+ batch = [];
106
+ await this.realtime.publish(options.conversationId, [{
107
+ mid,
108
+ seq: seq++,
109
+ pad: pad(),
110
+ data: { type: 'text', text, role: 'assistant' }
111
+ }]);
112
+ lastBatch = new Date().getTime();
113
+ }
114
+ }) : undefined
115
+ ;
116
+
117
+ console.log(
118
+ 'continue conversation',
119
+ options.systemPromptOverride,
120
+ (options as any).prompt
121
+ );
122
+
123
+ const tools = options.tools ?? this.tools;
124
+
125
+ const result = await this.llm.continueConversation({
126
+ systemPrompt: options.systemPromptOverride,
127
+ history: 'history' in options ? options.history : [{
128
+ role: 'user',
129
+ content: options.prompt
130
+ }],
131
+ onChunk,
132
+ ...(models.length > 0 ? { models } : {}),
133
+ ...(tools ? { tools } : {}),
134
+ });
135
+
136
+
137
+ if (batch.length > 0) {
138
+ const text = batch.join('');
139
+ await this.realtime.publish(options.conversationId, [{
140
+ mid,
141
+ seq: seq++,
142
+ pad: pad(),
143
+ data: { type: 'text', text, role: 'assistant' }
144
+ }]);
145
+ }
146
+
147
+ return this.addMessage(options.conversationId, mid, result);
148
+ }
149
+
150
+ async createConversation(user: User): Promise<Conversation> {
151
+ const createdAt = Date.now();
152
+ const timestamp = new Date().toLocaleString();
153
+ const name = `Conversation ${timestamp}`;
154
+ const userId = user.id;
155
+
156
+ for (let i = 0; i < 10; i++) {
157
+ const conversationId = randomUUID();
158
+ const conversation = {
159
+ conversationId, userId, createdAt, name
160
+ } satisfies Conversation;
161
+ await this.conversations.save(conversation, { onlyIfNotExists: true });
162
+ return conversation;
163
+ }
164
+
165
+ throw new Error("Could not create a unique conversation ID!");
166
+ }
167
+
168
+ async updateConversationName(conversationId: string, name: string): Promise<void> {
169
+ const conversation = await this.getConversation(conversationId);
170
+ if (!conversation) throw new Error("Conversation doesn't exist.");
171
+ conversation.name = name;
172
+ await this.conversations.save(conversation);
173
+ await this.sendControlMessage(conversationId, {
174
+ type: 'title',
175
+ value: name
176
+ });
177
+ }
178
+
179
+ async getConversation(conversationId: string): Promise<Conversation | undefined> {
180
+ return this.conversations.get({ conversationId });
181
+ }
182
+
183
+ async listUserConversations(user: User): Promise<Conversation[]> {
184
+ const conversationsGen = this.conversations.query({
185
+ by: 'userId-createdAt',
186
+ where: { userId: { eq: user.id } }
187
+ });
188
+ const conversations: Conversation[] = await fromAsync(conversationsGen);
189
+ return conversations.sort((a, b) => b.createdAt - a.createdAt);
190
+ }
191
+
192
+ async assertUserIsAuthorized(
193
+ user: User,
194
+ conversation: string | Conversation | undefined
195
+ ) : Promise<void> {
196
+ if (typeof conversation === 'string') {
197
+ const record = await this.getConversation(conversation);
198
+ return this.assertUserIsAuthorized(user, record);
199
+ } else if (conversation?.userId !== user.id) {
200
+ throw new Error("Not authorized");
201
+ }
202
+ }
203
+
204
+ async deleteConversation(conversationId: string): Promise<void> {
205
+ // start with the header. if need be, individual messages can be cleaned up
206
+ // later. whereas if we start with the messages and are interrupted, we'd just
207
+ // be corrupting a conversation.
208
+ await this.conversations.delete({ conversationId });
209
+
210
+ const messagesGen = this.messages.query({
211
+ by: 'conversationId-mid',
212
+ where: { conversationId: { eq: conversationId } }
213
+ });
214
+ const messagesToDelete = await fromAsync(messagesGen);
215
+ await Promise.all(messagesToDelete.map(msg => this.messages.delete(msg)));
216
+ }
217
+
218
+ async getHistory(conversationId: string): Promise<ConversationMessage[]> {
219
+ const storedMessages = this.messages.query({
220
+ by: 'conversationId-mid',
221
+ where: { conversationId: { eq: conversationId } }
222
+ });
223
+
224
+ // Convert async generator to array and sort by mid
225
+ const messagesArray = await fromAsync(storedMessages);
226
+ messagesArray.sort((a, b) => a.mid - b.mid);
227
+ return messagesArray;
228
+ };
229
+
230
+ async getMessage(
231
+ conversationId: string,
232
+ mid: number
233
+ ): Promise<ConversationMessage | undefined> {
234
+ return this.messages.get({ conversationId, mid });
235
+ }
236
+
237
+ async addMessage<T extends LLMMessage | WorkflowStep>(
238
+ conversationId: string,
239
+ mid: number,
240
+ message: T,
241
+ broadcast: boolean = false
242
+ ): Promise<ConversationMessage & T> {
243
+ const fullMessage: ConversationMessage = {
244
+ conversationId,
245
+ mid,
246
+ ...message,
247
+ createdAt: Date.now()
248
+ };
249
+
250
+ await this.messages.save(fullMessage, { onlyIfNotExists: true });
251
+
252
+ if (broadcast) {
253
+ await this.realtime.publish(conversationId, [{
254
+ mid,
255
+ seq: 0,
256
+ pad: pad(),
257
+ data: {
258
+ type: 'text',
259
+ text: message.content,
260
+ role: message.role
261
+ }
262
+ }]);
263
+ }
264
+
265
+ return fullMessage;
266
+ };
267
+
268
+ getStream(context: Context, conversationId: string) {
269
+ return this.realtime.getStream(context, conversationId);
270
+ }
271
+
272
+ async sendControlMessage(conversationId: string, data: ChunkData, mid: number = -1): Promise<void> {
273
+ await this.realtime.publish(conversationId, [{
274
+ mid,
275
+ seq: 0,
276
+ pad: pad(),
277
+ data
278
+ }]);
279
+ }
280
+ }
281
+
282
+ const makeRealtimeService = (scope: Resource) => new RealtimeService<Chunk>(scope, 'realtime');
283
+
284
+ const makeConversationsTable = (scope: Resource) => new DistributedTable(
285
+ scope,
286
+ 'conversations',
287
+ {
288
+ parse: PassThruParser<Conversation>,
289
+ key: {
290
+ partition: { field: 'conversationId', type: 'string' },
291
+ },
292
+ indexes: [
293
+ {
294
+ partition: { field: 'userId', type: 'string' },
295
+ sort: { field: 'createdAt', type: 'number' }
296
+ }
297
+ ],
298
+ }
299
+ );
300
+
301
+ const makeMessagesTable = (scope: Resource) => new DistributedTable(
302
+ scope,
303
+ 'messages',
304
+ {
305
+ parse: PassThruParser<ConversationMessage>,
306
+ key: {
307
+ partition: { field: 'conversationId', type: 'string' },
308
+ sort: { field: 'mid', type: 'number' }
309
+ }
310
+ }
311
+ );
312
+
313
+ const makeLLMService = (scope: Resource, systemPrompt?: string, models?: string[]) =>
314
+ new LLMService(scope, 'llm', {
315
+ models: models ?? DEFAULT_MODELS_LIST,
316
+ systemPrompt
317
+ });
318
+
319
+ const makeModelsOverrideSetting = (scope: Resource, models?: string[]) => new Setting(scope, 'models', {
320
+ private: false,
321
+ init: () => (models ?? DEFAULT_MODELS_LIST).join(', ')
322
+ });
@@ -0,0 +1,21 @@
1
+ import { dedent } from "./utils.js";
2
+
3
+ export const generateConversationTitle = (message: string) => dedent`
4
+ You generate short, descriptive conversation titles based on the user's initial message.
5
+
6
+ Rules:
7
+ - Return ONLY the title text, nothing else
8
+ - 3-6 words maximum
9
+ - Capture the main topic or question
10
+ - No quotes, no explanations
11
+
12
+ Examples:
13
+ - User asks about weather -> "Weather Information Request"
14
+ - User asks to explain quantum physics -> "Quantum Physics Explanation"
15
+ - User asks for recipe help -> "Recipe Assistance"
16
+ - User asks about programming -> "Programming Question"
17
+
18
+ Here is the message:
19
+
20
+ ${message}
21
+ `;
@@ -0,0 +1,116 @@
1
+ import type { ToolCall } from 'wirejs-resources';
2
+ import { Infra } from './infra.js'
3
+ import { cleanTitle } from './utils.js';
4
+ import { generateConversationTitle } from './prompts.js';
5
+ import { standard } from './tools.js';
6
+
7
+ const assignConversationName = async (infra: Infra, conversationId: string, message: string) => {
8
+ const titleResponse = await infra.prompt({
9
+ prompt: generateConversationTitle(message),
10
+ });
11
+ const name = cleanTitle(titleResponse.content);
12
+ console.log('name assigned', name, titleResponse);
13
+ await infra.updateConversationName(conversationId, name);
14
+ }
15
+
16
+ export const tooledHandler = (infra: Infra) => async (
17
+ room: string,
18
+ newUserMessage: string,
19
+ ) => {
20
+ try {
21
+ // we can let the user know we're doing stuff immediately.
22
+ // respectful clients should lock the UI.
23
+ // TODO: log status events so that newly connecting clients
24
+ // also know when the UI should be locked/unlocked.
25
+ await infra.sendControlMessage(room, { type: 'start' });
26
+
27
+ // now, we can get and save state
28
+ const history = await infra.getHistory(room);
29
+ let mid = history.length;
30
+ history.push(await infra.addMessage(room, mid++, {
31
+ role: 'user', content: newUserMessage
32
+ }));
33
+
34
+ // just getting started and need a conversation title.
35
+ if (mid === 1) {
36
+ await assignConversationName(infra, room, newUserMessage);
37
+ }
38
+
39
+ let maxLoops = 10;
40
+ let toolCalls: ToolCall[];
41
+ let responseMid = 0;
42
+ do {
43
+ const tools = maxLoops > 0 ? standard : undefined;
44
+
45
+ // lets connected clients know we're working on a response now.
46
+ await infra.sendControlMessage(room, {
47
+ type: 'status',
48
+ status: `📝 Responding ...`
49
+ });
50
+
51
+ const response = await infra.respond({
52
+ conversationId: room,
53
+ history: history.filter(h => h.role !== 'step'),
54
+ tools,
55
+ mid: mid++
56
+ });
57
+ responseMid = response.mid;
58
+ history.push(response);
59
+
60
+ toolCalls = tools && tools.length > 0 ? response.tool_calls ?? [] : [];
61
+ for (const call of toolCalls) {
62
+ const name = call.function.name;
63
+ const args = call.function.arguments;
64
+ const callString = `${name}(${JSON.stringify(args)})`;
65
+
66
+ // lets connected clients know we're doing a thing
67
+ infra.sendControlMessage(room, {
68
+ type: 'status',
69
+ status: `⚒️ Calling ${callString}`.slice(0, 100) + '...'
70
+ }, mid);
71
+
72
+ try {
73
+ const t = tools!.find(t => t.name === name);
74
+ if (!t) throw new Error(`${name} does not exist.`);
75
+ const r = await t.execute(args);
76
+
77
+ // lets future and connected clients know we did a thing
78
+ history.push(await infra.addMessage(room, mid++, {
79
+ role: 'step',
80
+ content: `⚒️ Called ${callString}`.slice(0, 100) + '...',
81
+ }, true));
82
+
83
+ // makes tool results visible in the conversation history
84
+ // to the agent.
85
+ history.push(await infra.addMessage(room, mid++, {
86
+ role: 'tool',
87
+ tool_name: name,
88
+ tool_call_id: call.id || JSON.stringify([name, args]),
89
+ content: JSON.stringify(r, null, 2),
90
+ }));
91
+ } catch (error) {
92
+ history.push(await infra.addMessage(room, mid++, {
93
+ role: 'tool',
94
+ tool_name: name,
95
+ tool_call_id: call.id || JSON.stringify([name, args]),
96
+ content: String(error),
97
+ }))
98
+ }
99
+ }
100
+
101
+ maxLoops--;
102
+ } while (toolCalls.length > 0);
103
+
104
+ // tell connected clients we're done responding (unlock the UI).
105
+ await infra.sendControlMessage(room, { type: 'end' }, responseMid);
106
+
107
+ } catch (error) {
108
+ console.error('=== LLM Error ===');
109
+ console.error('LLM call failed:', error);
110
+
111
+ await infra.sendControlMessage(room, {
112
+ type: 'status',
113
+ status: 'Internal Error. Please try again.'
114
+ });
115
+ }
116
+ };
@@ -0,0 +1,150 @@
1
+ import { dedent, extractContentFromHtml } from "./utils.js";
2
+ import type { ToolDefinition } from "./types.js";
3
+ import { JSDOM } from "jsdom";
4
+
5
+ const rawFetch = async (url: string) => {
6
+ try {
7
+ const parsedUrl = new URL(
8
+ ['http://', 'https://'].some(p => url.startsWith(p)) ? url : `https://${url}`
9
+ );
10
+
11
+ // Use the same fetch logic as httpGet but optimized for analysis
12
+ const controller = new AbortController();
13
+ const timeoutId = setTimeout(() => {
14
+ console.log(`Fetch timeout reached for: ${url}`);
15
+ controller.abort();
16
+ }, 20000); // 20 second timeout for analysis
17
+
18
+ console.log(`Fetching content from: ${url}`);
19
+ const request = await fetch(parsedUrl, {
20
+ signal: controller.signal,
21
+ headers: {
22
+ 'User-Agent': 'Mozilla/5.0 (compatible; WireJS-Analyzer/1.0)',
23
+ 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
24
+ }
25
+ });
26
+
27
+ clearTimeout(timeoutId);
28
+
29
+ if (!request.ok) {
30
+ throw new Error(`HTTP ${request.status}: ${request.statusText}`);
31
+ }
32
+
33
+ const body = await request.text();
34
+ console.log(`Fetched ${body.length} characters from: ${url}`);
35
+
36
+ return body;
37
+
38
+ } catch (error) {
39
+ console.error(`Error fetching ${url}:`, error);
40
+ if (error instanceof Error && error.name === 'AbortError') {
41
+ throw new Error(`Fetch timeout after 20 seconds for: ${url}`);
42
+ }
43
+ throw error;
44
+ }
45
+ }
46
+
47
+ /**
48
+ * Standard tool definitions include searching and fetching web content.
49
+ */
50
+ export const standard: ToolDefinition[] = [
51
+ {
52
+ name: 'fetch',
53
+ description: dedent`
54
+ Fetches raw content from an HTTP(S) URL via a GET request.
55
+
56
+ Use fetch ONLY when the URL is known and expected to contain
57
+ "raw data" like CSV, JSON, XML, YML, or plain text.
58
+ `,
59
+ parameters: {
60
+ type: 'object',
61
+ properties: {
62
+ url: {
63
+ type: 'string',
64
+ description: "Fully qualified URL string to fetch."
65
+ }
66
+ }
67
+ },
68
+ async execute({ url } : { url: string }) {
69
+ console.log(`[fetch] Received request for: ${url}`);
70
+ return rawFetch(url);
71
+ }
72
+ },
73
+ {
74
+ name: 'fetch_html_content_text',
75
+ description: dedent`
76
+ Extracts the text content from HTML at the given URL.
77
+
78
+ Use fetch_html_content_text ONLY when the URL is known and expected
79
+ to be an HTML page with meaningful content to extract.
80
+ `,
81
+ parameters: {
82
+ type: 'object',
83
+ properties: {
84
+ url: {
85
+ type: 'string',
86
+ description: "Fully qualified URL string to fetch."
87
+ }
88
+ }
89
+ },
90
+ async execute({ url } : { url: string }) {
91
+ console.log(`[natural language fetch] Received request for: ${url}`);
92
+ const content = extractContentFromHtml(await rawFetch(url));
93
+ console.log(`Extracted content:\n${content}`);
94
+ return content;
95
+ }
96
+ },
97
+ {
98
+ name: 'web_search',
99
+ description: dedent`
100
+ Searches the web using DuckDuckGo.
101
+
102
+ Use web_search ONLY when user intent indicates a need for information beyond
103
+ your training or if the subject matter is temporally sensitive AND when a
104
+ web search is likely to provide the URL you need for a future fetch or
105
+ fetch_html_content_text call.
106
+ `,
107
+ parameters: {
108
+ type: 'object',
109
+ properties: {
110
+ query: {
111
+ type: 'string',
112
+ description: 'Search text to use for searching the web. Supports DuckDuckGo search syntax.'
113
+ }
114
+ }
115
+ },
116
+ async execute({ query } : { query: string }) {
117
+ console.log(`[searching] query: ${query}`);
118
+ const rawHtml = await rawFetch(`https://html.duckduckgo.com/html/?q=${query}`);
119
+ return parseDuckDuckGoResults(rawHtml);
120
+ }
121
+ }
122
+ ];
123
+
124
+ const parseDuckDuckGoResults = (html: string) : string => {
125
+ const results: Array<{ url: string; title: string; description: string }> = [];
126
+ const dom = new JSDOM(html);
127
+ const doc = dom.window.document;
128
+
129
+ const resultElements = doc.querySelectorAll('.result');
130
+
131
+ resultElements.forEach((element) => {
132
+ const titleEl = element.querySelector('.result__a');
133
+ const snippetEl = element.querySelector('.result__snippet');
134
+ const linkEl = element.querySelector('a.result__a') as HTMLAnchorElement;
135
+
136
+ if (titleEl && snippetEl && linkEl) {
137
+ const url = new URL(linkEl.getAttribute('href') || '', 'https://html.duckduckgo.com');
138
+ const uddgParam = url.searchParams.get('uddg');
139
+ const decodedUrl = uddgParam ? decodeURIComponent(uddgParam) : '';
140
+
141
+ results.push({
142
+ url: decodedUrl,
143
+ title: titleEl.textContent?.trim() || '',
144
+ description: snippetEl.textContent?.trim() || ''
145
+ });
146
+ }
147
+ });
148
+
149
+ return JSON.stringify(results, null, 2);
150
+ };