oblien 1.1.2 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -87,7 +87,7 @@ const overview = await agentInstance.getOverview({ days: 7 });

  ### 💬 Chat Module

- Create and manage chat sessions with guest support.
+ Create sessions, send messages, and manage guests with streaming support.

  ```javascript
  import { OblienChat } from 'oblien/chat';
@@ -100,6 +100,27 @@ const session = await chat.createSession({
    namespace: 'production'
  });

+ // Send message with streaming
+ await chat.send({
+   token: session.token,
+   message: 'Tell me about AI',
+   stream: true,
+   onChunk: (data) => console.log(data)
+ });
+
+ // Upload files
+ const uploadResult = await chat.upload({
+   token: session.token,
+   files: fileArray
+ });
+
+ // Send message with uploaded files
+ await chat.send({
+   token: session.token,
+   message: 'Analyze these files',
+   uploadId: uploadResult.uploadId
+ });
+
  // Create guest session
  const guestSession = await chat.createGuestSession({
    ip: '192.168.1.1',
@@ -107,17 +128,20 @@ const guestSession = await chat.createGuestSession({
    agentId: 'agent-id'
  });

- // List sessions
- const sessions = await chat.listSessions({ limit: 20 });
+ // Get guest usage
+ const usage = await chat.getGuestUsage(guestSession.token);
  ```

  **Features:**
- - ✅ Session management
- - ✅ Guest sessions with fingerprint tracking
- - ✅ Token generation
- - ✅ Automatic guest ID generation
+ - ✅ Session management (create, list, delete)
+ - ✅ Message sending with streaming support
+ - ✅ File uploads for agent analysis
+ - ✅ Guest sessions with IP + fingerprint tracking
+ - ✅ Guest usage monitoring and rate limiting
+ - ✅ Hybrid mode (works with token or client credentials)
+ - ✅ Cache statistics for monitoring

- 📖 [Documentation](./docs/CHAT.md) | 💡 [Examples](./examples/chat-example.js)
+ 📖 [Full Documentation](./docs/CHAT.md) | 💡 [Examples](./examples/chat-example.js)

  ---

@@ -517,6 +541,14 @@ MIT License - see LICENSE file for details

  ## Changelog

+ ### v1.3.0 (Latest)
+ - ✅ Added `send()` method to Chat module with streaming support
+ - ✅ Added `upload()` method for file attachments
+ - ✅ Added guest usage monitoring (`getGuestUsage()`)
+ - ✅ Added cache statistics (`getCacheStatistics()`)
+ - ✅ Hybrid mode support (token or client credentials)
+ - ✅ Complete Chat documentation with examples
+
  ### v1.2.0
  - ✅ Added Sandboxes module
  - ✅ Enhanced Agents module with proper settings sections
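
The README's new snippet exercises the token-based path; two of the listed features, hybrid mode and cache statistics, are not shown there. A minimal sketch of how they could look, continuing the `chat` instance from the snippet above (the message text and logging are illustrative; the response fields follow the `CacheStatistics` type added in `index.d.ts`):

```javascript
// Hybrid mode: omit `token` and send() falls back to the client credentials
// configured on the OblienClient behind `chat`.
const reply = await chat.send({ message: 'Summarize the last conversation' });

// Cache statistics for monitoring; field names match the new CacheStatistics type.
const stats = await chat.getCacheStatistics();
console.log(`cache hit rate: ${stats.cache.hitRate} (${stats.cache.keys} keys)`);
```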
package/index.d.ts CHANGED
@@ -164,17 +164,60 @@ export interface GuestSessionData extends SessionData {
    };
  }

+ export interface SendMessageOptions {
+   token?: string;
+   message: string;
+   uploadId?: string;
+   files?: any[];
+   stream?: boolean;
+   onChunk?: (data: any) => void;
+   onError?: (error: Error) => void;
+   onComplete?: () => void;
+   metadata?: Record<string, any>;
+ }
+
+ export interface UploadOptions {
+   token?: string;
+   files: any[] | any;
+   metadata?: Record<string, any>;
+ }
+
+ export interface GuestUsageInfo {
+   success: boolean;
+   namespace: string;
+   requestCount: number;
+   limit: number;
+   remaining: number;
+   resetAt?: Date;
+ }
+
+ export interface CacheStatistics {
+   success: boolean;
+   cache: {
+     keys: number;
+     hits: number;
+     misses: number;
+     hitRate: number;
+     keysSize: number;
+     valuesSize: number;
+   };
+ }
+
  export class OblienChat {
    constructor(client: OblienClient, options?: ChatOptions);

    createSession(options: CreateSessionOptions): Promise<SessionData>;
    createGuestSession(options: CreateGuestSessionOptions): Promise<GuestSessionData>;
    getGuest(ip: string, fingerprint?: string): Promise<Guest | null>;
+   send(options: SendMessageOptions): Promise<any>;
+   upload(options: UploadOptions): Promise<any>;
    getSession(sessionId: string): Promise<any>;
    listSessions(options?: Record<string, any>): Promise<any[]>;
    deleteSession(sessionId: string): Promise<any>;
    getAllGuests(): Promise<Guest[]>;
    cleanupGuests(): Promise<number>;
+   getGuestUsage(token: string): Promise<GuestUsageInfo>;
+   getCacheStatistics(): Promise<CacheStatistics>;
  }

  // ============ Namespaces ============
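
The new `GuestUsageInfo` shape makes a simple pre-send quota check possible for guest sessions. A sketch under the assumption that `chat` and `guestSession` were created as in the README example (the threshold logic is illustrative, not part of the SDK):

```javascript
// Check a guest's remaining quota before sending on their behalf.
const usage = await chat.getGuestUsage(guestSession.token);

if (usage.remaining <= 0) {
  // resetAt is optional in GuestUsageInfo, so guard before using it
  const hint = usage.resetAt ? ` (resets at ${usage.resetAt})` : '';
  throw new Error(`Guest limit of ${usage.limit} requests reached${hint}`);
}

await chat.send({
  token: guestSession.token,
  message: 'Hello from a guest session',
});
```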
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "oblien",
-   "version": "1.1.2",
+   "version": "1.1.3",
    "description": "Server-side SDK for Oblien AI Platform - Build AI-powered applications with chat, agents, and workflows",
    "main": "index.js",
    "type": "module",
package/src/chat/index.js CHANGED
@@ -138,6 +138,205 @@ export class OblienChat {
      return null;
    }

+   /**
+    * Send a message in a chat session
+    * Hybrid mode: Works with session token OR client credentials
+    *
+    * @param {Object} options - Send options
+    * @param {string} [options.token] - Session token (optional, uses client credentials if not provided)
+    * @param {string} options.message - Message to send
+    * @param {string} [options.uploadId] - Upload ID for attached files
+    * @param {Array} [options.files] - File attachments (alternative to uploadId)
+    * @param {boolean} [options.stream] - Enable streaming response
+    * @param {Function} [options.onChunk] - Callback for streaming chunks (data) => void
+    * @param {Function} [options.onError] - Callback for errors (error) => void
+    * @param {Function} [options.onComplete] - Callback when stream completes () => void
+    * @param {Object} [options.metadata] - Additional metadata
+    * @returns {Promise<Object>} Response data
+    */
+   async send(options = {}) {
+     const {
+       token,
+       message,
+       uploadId,
+       files,
+       stream = false,
+       onChunk,
+       onError,
+       onComplete,
+       metadata = {}
+     } = options;
+
+     if (!message || !message.trim()) {
+       throw new Error('Message is required');
+     }
+
+     const payload = {
+       message,
+       stream,
+       ...metadata,
+     };
+
+     if (uploadId) {
+       payload.upload_id = uploadId;
+     }
+
+     if (files) {
+       payload.files = files;
+     }
+
+     // Build headers - use token if provided, otherwise use client credentials
+     const headers = {
+       ...this.client.getAuthHeaders(),
+       'Content-Type': 'application/json',
+     };
+
+     if (token) {
+       headers['Authorization'] = `Bearer ${token}`;
+     }
+
+     const url = this.client._buildURL('ai/chat/send');
+
+     if (stream) {
+       // Handle streaming response
+       const response = await fetch(url, {
+         method: 'POST',
+         headers,
+         body: JSON.stringify(payload),
+       });
+
+       if (!response.ok) {
+         const error = await response.text();
+         throw new Error(error || `API error: ${response.status}`);
+       }
+
+       // Process SSE stream
+       const reader = response.body.getReader();
+       const decoder = new TextDecoder();
+       let buffer = '';
+
+       try {
+         while (true) {
+           const { done, value } = await reader.read();
+
+           if (done) {
+             if (onComplete) onComplete();
+             break;
+           }
+
+           buffer += decoder.decode(value, { stream: true });
+           const lines = buffer.split('\n');
+           buffer = lines.pop() || '';
+
+           for (const line of lines) {
+             if (line.trim() === '') continue;
+
+             try {
+               const data = JSON.parse(line);
+               if (onChunk) onChunk(data);
+             } catch (e) {
+               // Skip non-JSON lines (SSE format)
+               if (line.startsWith('data: ')) {
+                 try {
+                   const jsonStr = line.substring(6);
+                   const data = JSON.parse(jsonStr);
+                   if (onChunk) onChunk(data);
+                 } catch (parseError) {
+                   console.warn('Failed to parse chunk:', line);
+                 }
+               }
+             }
+           }
+         }
+
+         return { success: true, stream: true };
+       } catch (error) {
+         if (onError) onError(error);
+         throw error;
+       }
+     } else {
+       // Regular non-streaming request
+       const response = await fetch(url, {
+         method: 'POST',
+         headers,
+         body: JSON.stringify(payload),
+       });
+
+       if (!response.ok) {
+         const error = await response.json().catch(() => ({ message: response.statusText }));
+         throw new Error(error.message || error.error || `API error: ${response.status}`);
+       }
+
+       return await response.json();
+     }
+   }
+
+   /**
+    * Upload files for a chat session
+    * Hybrid mode: Works with session token OR client credentials
+    *
+    * @param {Object} options - Upload options
+    * @param {string} [options.token] - Session token (optional, uses client credentials if not provided)
+    * @param {Array|Object} options.files - Files to upload
+    * @param {Object} [options.metadata] - Additional metadata
+    * @returns {Promise<Object>} Upload result with uploadId
+    */
+   async upload(options = {}) {
+     const { token, files, metadata } = options;
+
+     if (!files || (Array.isArray(files) && files.length === 0)) {
+       throw new Error('Files are required');
+     }
+
+     const formData = new FormData();
+
+     // Handle both single file and array of files
+     const fileArray = Array.isArray(files) ? files : [files];
+
+     fileArray.forEach((file, index) => {
+       if (file instanceof File || file instanceof Blob) {
+         formData.append('files', file);
+       } else if (file.path && file.buffer) {
+         // Node.js file object
+         formData.append('files', file.buffer, file.originalname || file.path);
+       } else {
+         throw new Error(`Invalid file at index ${index}`);
+       }
+     });
+
+     // Add any additional options
+     if (metadata) {
+       formData.append('metadata', JSON.stringify(metadata));
+     }
+
+     // Build headers - use token if provided, otherwise use client credentials
+     const headers = {
+       ...this.client.getAuthHeaders(),
+     };
+
+     if (token) {
+       headers['Authorization'] = `Bearer ${token}`;
+     }
+
+     // Remove Content-Type for FormData (browser will set it with boundary)
+     delete headers['Content-Type'];
+
+     const url = this.client._buildURL('ai/chat/upload');
+
+     const response = await fetch(url, {
+       method: 'POST',
+       headers,
+       body: formData,
+     });
+
+     if (!response.ok) {
+       const error = await response.json().catch(() => ({ message: response.statusText }));
+       throw new Error(error.message || error.error || `API error: ${response.status}`);
+     }
+
+     return await response.json();
+   }
+
    /**
     * Get session info
     * @param {string} sessionId - Session ID
@@ -181,6 +380,43 @@ export class OblienChat {
    async cleanupGuests() {
      return await this.guestManager.cleanup();
    }
+
+   /**
+    * Get guest usage information (requires session token)
+    * @param {string} token - Session token from guest session
+    * @returns {Promise<Object>} Usage information for the guest
+    */
+   async getGuestUsage(token) {
+     if (!token) {
+       throw new Error('Session token is required');
+     }
+
+     const headers = {
+       ...this.client.getAuthHeaders(),
+       'Authorization': `Bearer ${token}`,
+     };
+
+     const response = await fetch(this.client._buildURL('ai/guest/usage'), {
+       method: 'GET',
+       headers,
+     });
+
+     if (!response.ok) {
+       const error = await response.json().catch(() => ({ message: response.statusText }));
+       throw new Error(error.message || error.error || `API error: ${response.status}`);
+     }
+
+     return await response.json();
+   }
+
+   /**
+    * Get cache statistics (admin/monitoring)
+    * @returns {Promise<Object>} Cache statistics
+    */
+   async getCacheStatistics() {
+     const data = await this.client.get('ai/guest/cache-stats');
+     return data;
+   }
  }

  export { ChatSession } from './session.js';
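
In streaming mode, `send()` parses each newline-delimited JSON chunk (or `data:`-prefixed SSE line) and passes the parsed object to `onChunk`, calling `onComplete` when the stream ends and `onError` before rethrowing on failure. A sketch of a consumer that collects a full reply from those callbacks; the `chunk.content` field is an assumption, since the diff does not define the chunk payload shape:

```javascript
// Accumulate streamed chunks into one string and resolve when the stream completes.
// Assumes `chat` and `session` from the README example; chunk.content is a guess.
function streamReply(chat, token, message) {
  return new Promise((resolve, reject) => {
    let full = '';
    chat.send({
      token,
      message,
      stream: true,
      onChunk: (chunk) => {
        if (typeof chunk.content === 'string') full += chunk.content;
      },
      onComplete: () => resolve(full),
      onError: reject,
    }).catch(reject); // also catches errors thrown before the stream starts
  });
}

const answer = await streamReply(chat, session.token, 'Tell me about AI');
console.log(answer);
```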