oblien 1.1.0 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +377 -494
- package/agents.js +14 -0
- package/icons.js +11 -0
- package/index.js +16 -0
- package/package.json +9 -2
- package/sandbox.js +12 -0
- package/search.js +11 -0
- package/src/agents/agent.js +229 -0
- package/src/agents/index.js +212 -0
- package/src/agents/settings.js +100 -0
- package/src/agents/tools.js +155 -0
- package/src/icons/index.js +185 -0
- package/src/sandbox/index.js +185 -0
- package/src/sandbox/sandbox.js +124 -0
- package/src/search/index.js +191 -0
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
/**
 * Search Module
 * Web search, content extraction, and research crawling
 */

export class OblienSearch {
    /**
     * @param {import('../client.js').OblienClient} client - Oblien client instance
     * @throws {Error} If no client is provided
     */
    constructor(client) {
        if (!client) {
            throw new Error('Oblien client is required');
        }

        this.client = client;
    }

    /**
     * Search the web with multiple queries
     * @param {Object} options - Search options
     * @param {Array<string>} options.queries - Array of search queries (required)
     * @param {boolean} [options.includeAnswers=false] - Include AI-generated answers
     * @param {Object} [options.options] - Additional search options
     * @param {number} [options.options.maxResults] - Max results per query
     * @param {string} [options.options.region] - Search region
     * @param {string} [options.options.timeRange] - Time range filter
     * @returns {Promise<Array>} Search results
     * @throws {Error} If queries is missing, not an array, or empty
     */
    async search(options) {
        const { queries, includeAnswers = false, options: searchOptions = {} } = options;

        if (!queries || !Array.isArray(queries) || queries.length === 0) {
            throw new Error('Queries array is required and must not be empty');
        }

        const response = await this.client.post('search', {
            queries,
            includeAnswers,
            options: searchOptions
        });

        return response;
    }

    /**
     * Extract and summarize content from web pages
     * @param {Object} options - Extract options
     * @param {Array<Object>} options.pages - Array of pages to extract (required)
     * @param {string} options.pages[].url - Page URL (required)
     * @param {Array<string>} options.pages[].details - Details to extract (required)
     * @param {string} [options.pages[].summaryLevel='medium'] - Summary level: 'brief' | 'medium' | 'detailed'
     * @param {Object} [options.options] - Additional extraction options
     * @returns {Promise<Object>} Extracted content
     * @throws {Error} If pages is missing/empty, or any page lacks 'url' or a non-empty 'details' array
     */
    async extract(options) {
        const { pages, options: extractOptions = {} } = options;

        if (!pages || !Array.isArray(pages) || pages.length === 0) {
            throw new Error('Pages array is required and must not be empty');
        }

        // Validate every page up front so the API never sees a partial batch
        pages.forEach((page, index) => {
            if (!page.url) {
                throw new Error(`Page at index ${index} is missing 'url'`);
            }
            if (!page.details || !Array.isArray(page.details) || page.details.length === 0) {
                throw new Error(`Page at index ${index} is missing 'details' array`);
            }
        });

        const response = await this.client.post('search/extract', {
            pages,
            options: extractOptions
        });

        return response;
    }

    /**
     * Create deep research report with AI crawling
     * @param {Object} options - Crawl options
     * @param {string} options.instructions - Research instructions (required)
     * @param {Object} [options.options] - Additional crawl options
     * @param {boolean} [options.options.thinking] - Enable thinking mode
     * @param {boolean} [options.options.allow_thinking_callback] - Allow thinking callbacks
     * @param {boolean} [options.options.stream_text] - Stream text responses
     * @param {string} [options.reportType='pdf'] - Report type: 'pdf' | 'markdown' | 'html'
     * @param {Function} [options.onProgress] - Callback for progress updates (streaming)
     * @returns {Promise<Object>} Research report result
     * @throws {Error} If instructions is missing or not a string
     */
    async crawl(options) {
        const {
            instructions,
            options: crawlOptions = {},
            reportType = 'pdf',
            onProgress = null
        } = options;

        if (!instructions || typeof instructions !== 'string') {
            throw new Error('Instructions string is required');
        }

        // If onProgress is provided, switch to the SSE streaming path
        if (onProgress && typeof onProgress === 'function') {
            return this._crawlWithStreaming({
                instructions,
                options: crawlOptions,
                reportType,
                onProgress
            });
        }

        // Standard JSON response
        const response = await this.client.post('search/crawl', {
            instructions,
            options: crawlOptions,
            responseType: 'json',
            reportType
        });

        return response;
    }

    /**
     * Handle streaming crawl with progress callbacks.
     *
     * Fixes over the previous version:
     * - Server-sent 'error' events now propagate to the caller (previously the
     *   throw was caught by the JSON-parse catch, logged, and the method
     *   returned { success: true }).
     * - The decoder is flushed after the stream ends and a trailing SSE
     *   message without a final '\n\n' delimiter is still processed.
     * - The reader lock is released even when an error is thrown mid-stream.
     * @private
     */
    async _crawlWithStreaming({ instructions, options, reportType, onProgress }) {
        const url = this.client._buildURL('search/crawl');
        const headers = this.client.getAuthHeaders();

        const response = await fetch(url, {
            method: 'POST',
            headers,
            body: JSON.stringify({
                instructions,
                options,
                responseType: 'stream',
                reportType
            })
        });

        if (!response.ok) {
            const error = await response.json();
            throw new Error(error.message || error.error || 'Crawl request failed');
        }

        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = '';
        let finalResult = null;

        // Process one complete SSE message. Only JSON.parse is guarded, so
        // server-reported errors thrown below escape to the caller.
        const handleMessage = (message) => {
            if (!message.startsWith('data: ')) {
                return;
            }

            let data;
            try {
                data = JSON.parse(message.slice(6));
            } catch (error) {
                // Malformed frame: log and skip (best-effort, as before)
                console.error('Error parsing SSE data:', error);
                return;
            }

            if (data.type === 'crawl_end') {
                finalResult = data.data;
            } else if (data.type === 'error') {
                throw new Error(data.error);
            } else {
                // Progress update
                onProgress(data);
            }
        };

        try {
            while (true) {
                const { done, value } = await reader.read();

                if (done) break;

                buffer += decoder.decode(value, { stream: true });

                // Process complete SSE messages; keep incomplete tail in buffer
                const messages = buffer.split('\n\n');
                buffer = messages.pop() || '';

                for (const message of messages) {
                    handleMessage(message);
                }
            }

            // Flush multi-byte state held by the decoder and handle a final
            // message that arrived without a trailing '\n\n'
            buffer += decoder.decode();
            if (buffer) {
                handleMessage(buffer);
            }
        } finally {
            reader.releaseLock();
        }

        return finalResult || { success: true };
    }
}

export default OblienSearch;
|
|
191
|
+
|