@nebula-ai/sdk 1.1.6 → 1.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -7
- package/dist/index.d.mts +3 -70
- package/dist/index.d.ts +3 -70
- package/dist/index.js +20 -163
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +20 -163
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -137,23 +137,27 @@ const result = await client.delete(['id1', 'id2', 'id3']); // Returns detailed r
 const conversationId = await client.storeMemory({
   collection_id: collection.id,
   content: 'What is machine learning?',
-  role: 'user'
+  role: 'user',
+  metadata: { content_type: 'conversation' }
 });

 await client.storeMemory({
   collection_id: collection.id,
   content: 'Machine learning is a subset of AI...',
   role: 'assistant',
-  parent_id: conversationId
+  parent_id: conversationId,
+  metadata: { content_type: 'conversation' }
 });

-// List
-const conversations = await client.
-  collection_ids: [collection.id]
+// List conversation memories
+const conversations = await client.listMemories({
+  collection_ids: [collection.id],
+  metadata_filters: { 'metadata.content_type': { $eq: 'conversation' } }
 });

-// Get messages
-const
+// Get messages from a conversation memory
+const conversation = await client.getMemory(conversationId);
+const messages = conversation.chunks ?? [];
 ```

 ## Error Handling
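The updated README snippet, consolidated below as a runnable sketch. Client construction is not part of this hunk, so the `client` instance and the collection lookup are assumptions; the memory calls (`storeMemory`, `listMemories`, `getMemory`) follow the added lines above.

```ts
import { Nebula } from '@nebula-ai/sdk';

declare const client: Nebula; // assumption: an already-configured client

// Pick any existing collection; the README hunk assumes one is already available.
const [collection] = await client.listCollections({ limit: 1 });

// Store a two-message conversation, tagging both memories as conversation content.
const conversationId = await client.storeMemory({
  collection_id: collection.id,
  content: 'What is machine learning?',
  role: 'user',
  metadata: { content_type: 'conversation' },
});
await client.storeMemory({
  collection_id: collection.id,
  content: 'Machine learning is a subset of AI...',
  role: 'assistant',
  parent_id: conversationId,
  metadata: { content_type: 'conversation' },
});

// List only conversation memories, then read the messages back as chunks.
const conversations = await client.listMemories({
  collection_ids: [collection.id],
  metadata_filters: { 'metadata.content_type': { $eq: 'conversation' } },
});
const conversation = await client.getMemory(conversationId);
const messages = conversation.chunks ?? [];
```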
package/dist/index.d.mts
CHANGED
@@ -88,13 +88,6 @@ interface GraphCommunityResult {
     summary: string;
     metadata: Record<string, any>;
 }
-interface AgentResponse {
-    content: string;
-    agent_id: string;
-    conversation_id?: string;
-    metadata: Record<string, any>;
-    citations: Record<string, any>[];
-}
 interface SearchOptions {
     limit: number;
     filters?: Record<string, any>;
@@ -215,56 +208,8 @@ declare class Nebula {
     listCollections(options?: {
         limit?: number;
         offset?: number;
+        name?: string;
     }): Promise<Collection[]>;
-    /**
-     * List conversations for the authenticated user with optional metadata filtering
-     *
-     * @param options - Configuration for listing conversations
-     * @param options.limit - Maximum number of conversations to return (default: 100)
-     * @param options.offset - Number of conversations to skip for pagination (default: 0)
-     * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-     * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-     *                                   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-     *
-     * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-     *
-     * @example
-     * // Get all playground conversations
-     * const conversations = await client.listConversations({
-     *   collection_ids: ['collection-id'],
-     *   metadata_filters: {
-     *     'metadata.playground': { $eq: true }
-     *   }
-     * });
-     *
-     * @example
-     * // Filter by session ID
-     * const conversations = await client.listConversations({
-     *   metadata_filters: {
-     *     'metadata.session_id': { $eq: 'session-123' }
-     *   }
-     * });
-     */
-    listConversations(options?: {
-        limit?: number;
-        offset?: number;
-        collection_ids?: string[];
-        metadata_filters?: Record<string, any>;
-    }): Promise<any[]>;
-    /**
-     * Get conversation messages from the engrams API.
-     *
-     * This method retrieves conversation engrams and parses their chunks into structured messages.
-     * Expects conversation engrams to contain structured chunks with role metadata:
-     * `{text: string, role: 'user'|'assistant'|'system'}`.
-     * Converts chunks to `MemoryResponse` objects with proper role metadata.
-     *
-     * @param conversationId - Single conversation ID (returns array of messages)
-     * @param conversationIds - Multiple conversation IDs (returns map of conversation_id -> messages)
-     * @returns Messages for the requested conversation(s)
-     */
-    getConversationMessages(conversationId: string): Promise<MemoryResponse[]>;
-    getConversationMessages(conversationIds: string[]): Promise<Record<string, MemoryResponse[]>>;
     /** Update a collection */
     updateCollection(options: {
         collectionId: string;
@@ -343,8 +288,6 @@ declare class Nebula {
         collectionIds?: string[];
         mergeMetadata?: boolean;
     }): Promise<boolean>;
-    /** Delete a conversation and all its messages */
-    deleteConversation(conversationId: string): Promise<boolean>;
     /**
      * Get all memories from specific collections with optional metadata filtering
      *
@@ -484,21 +427,11 @@ declare class Nebula {
      */
     search(options: {
         query: string;
-        collection_ids
+        collection_ids?: string | string[];
         limit?: number;
         filters?: Record<string, any>;
-        search_mode?: 'fast' | 'super';
         searchSettings?: Record<string, any>;
     }): Promise<MemoryRecall>;
-    /**
-     * Legacy wrapper: store a two-message conversation turn as a document
-     */
-    storeConversation(userMessage: string, assistantMessage: string, collectionId: string, sessionId: string): Promise<MemoryResponse>;
-    /**
-     * Legacy wrapper: search conversations optionally scoped by session
-     * Now returns MemoryRecall with hierarchical memory structure
-     */
-    searchConversations(query: string, collectionId: string, sessionId?: string, includeAllSessions?: boolean): Promise<MemoryRecall>;
     healthCheck(): Promise<Record<string, any>>;
     private _collectionFromDict;
     private _memoryResponseFromDict;
@@ -508,4 +441,4 @@ declare class Nebula {
     private _formDataFromObject;
 }

-export { type ActivatedEntity, type ActivatedFact, type
+export { type ActivatedEntity, type ActivatedFact, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type GroundedUtterance, type Memory, type MemoryRecall, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type RecallFocus, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
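Per the declaration changes above, `search()` now takes an optional `collection_ids` that may be a single ID or an array, and the `search_mode` option is gone. A hedged sketch against the 1.1.10 signature; the client instance and IDs are placeholders, and the filter shape mirrors the one used by the removed `searchConversations` wrapper:

```ts
import { Nebula, type MemoryRecall } from '@nebula-ai/sdk';

declare const client: Nebula; // assumption: an already-configured client

// collection_ids is optional and accepts a single string or an array.
const recall: MemoryRecall = await client.search({
  query: 'machine learning basics',
  collection_ids: 'collection-id',
  limit: 5,
  filters: { 'metadata.content_type': 'conversation' },
});

// Omitting collection_ids sends no client-side collection scope; search_mode is no longer accepted.
const broad = await client.search({ query: 'machine learning basics' });
```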
package/dist/index.d.ts
CHANGED
@@ -88,13 +88,6 @@ interface GraphCommunityResult {
     summary: string;
     metadata: Record<string, any>;
 }
-interface AgentResponse {
-    content: string;
-    agent_id: string;
-    conversation_id?: string;
-    metadata: Record<string, any>;
-    citations: Record<string, any>[];
-}
 interface SearchOptions {
     limit: number;
     filters?: Record<string, any>;
@@ -215,56 +208,8 @@ declare class Nebula {
     listCollections(options?: {
         limit?: number;
         offset?: number;
+        name?: string;
     }): Promise<Collection[]>;
-    /**
-     * List conversations for the authenticated user with optional metadata filtering
-     *
-     * @param options - Configuration for listing conversations
-     * @param options.limit - Maximum number of conversations to return (default: 100)
-     * @param options.offset - Number of conversations to skip for pagination (default: 0)
-     * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-     * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-     *                                   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-     *
-     * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-     *
-     * @example
-     * // Get all playground conversations
-     * const conversations = await client.listConversations({
-     *   collection_ids: ['collection-id'],
-     *   metadata_filters: {
-     *     'metadata.playground': { $eq: true }
-     *   }
-     * });
-     *
-     * @example
-     * // Filter by session ID
-     * const conversations = await client.listConversations({
-     *   metadata_filters: {
-     *     'metadata.session_id': { $eq: 'session-123' }
-     *   }
-     * });
-     */
-    listConversations(options?: {
-        limit?: number;
-        offset?: number;
-        collection_ids?: string[];
-        metadata_filters?: Record<string, any>;
-    }): Promise<any[]>;
-    /**
-     * Get conversation messages from the engrams API.
-     *
-     * This method retrieves conversation engrams and parses their chunks into structured messages.
-     * Expects conversation engrams to contain structured chunks with role metadata:
-     * `{text: string, role: 'user'|'assistant'|'system'}`.
-     * Converts chunks to `MemoryResponse` objects with proper role metadata.
-     *
-     * @param conversationId - Single conversation ID (returns array of messages)
-     * @param conversationIds - Multiple conversation IDs (returns map of conversation_id -> messages)
-     * @returns Messages for the requested conversation(s)
-     */
-    getConversationMessages(conversationId: string): Promise<MemoryResponse[]>;
-    getConversationMessages(conversationIds: string[]): Promise<Record<string, MemoryResponse[]>>;
     /** Update a collection */
     updateCollection(options: {
         collectionId: string;
@@ -343,8 +288,6 @@ declare class Nebula {
         collectionIds?: string[];
         mergeMetadata?: boolean;
     }): Promise<boolean>;
-    /** Delete a conversation and all its messages */
-    deleteConversation(conversationId: string): Promise<boolean>;
     /**
      * Get all memories from specific collections with optional metadata filtering
      *
@@ -484,21 +427,11 @@ declare class Nebula {
      */
     search(options: {
         query: string;
-        collection_ids
+        collection_ids?: string | string[];
         limit?: number;
         filters?: Record<string, any>;
-        search_mode?: 'fast' | 'super';
         searchSettings?: Record<string, any>;
     }): Promise<MemoryRecall>;
-    /**
-     * Legacy wrapper: store a two-message conversation turn as a document
-     */
-    storeConversation(userMessage: string, assistantMessage: string, collectionId: string, sessionId: string): Promise<MemoryResponse>;
-    /**
-     * Legacy wrapper: search conversations optionally scoped by session
-     * Now returns MemoryRecall with hierarchical memory structure
-     */
-    searchConversations(query: string, collectionId: string, sessionId?: string, includeAllSessions?: boolean): Promise<MemoryRecall>;
     healthCheck(): Promise<Record<string, any>>;
     private _collectionFromDict;
     private _memoryResponseFromDict;
@@ -508,4 +441,4 @@ declare class Nebula {
     private _formDataFromObject;
 }

-export { type ActivatedEntity, type ActivatedFact, type
+export { type ActivatedEntity, type ActivatedFact, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type GroundedUtterance, type Memory, type MemoryRecall, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type RecallFocus, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
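Both declaration files drop the conversation-specific helpers (`listConversations`, `getConversationMessages`, `deleteConversation`, `storeConversation`, `searchConversations`). A possible migration path using the memory APIs that remain; the `listMemories`/`getMemory` shapes are taken from the updated README, `delete()` from the README hunk header, and the IDs are placeholders:

```ts
import { Nebula } from '@nebula-ai/sdk';

declare const client: Nebula; // assumption: an already-configured client

// listConversations(...) -> listMemories(...) filtered by a content_type metadata tag.
const conversations = await client.listMemories({
  collection_ids: ['collection-id'],
  metadata_filters: { 'metadata.content_type': { $eq: 'conversation' } },
});

// getConversationMessages(id) -> getMemory(id); messages are exposed as chunks.
const conversation = await client.getMemory('conversation-id');
const messages = conversation.chunks ?? [];

// deleteConversation(id) -> the generic delete(), which takes an array of IDs.
await client.delete(['conversation-id']);
```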
package/dist/index.js
CHANGED
@@ -197,6 +197,9 @@ var Nebula = class {
       limit: options?.limit ?? 100,
       offset: options?.offset ?? 0
     };
+    if (options?.name !== void 0) {
+      params.name = options.name;
+    }
     const response = await this._makeRequest("GET", "/v1/collections", void 0, params);
     let collections;
     if (response.results) {
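The hunk above adds an optional `name` filter to `listCollections()`, forwarded as a query parameter only when it is defined. A small sketch; the client and collection name are placeholders:

```ts
import { Nebula } from '@nebula-ai/sdk';

declare const client: Nebula; // assumption: an already-configured client

// name is only added to the query parameters when provided, so existing calls are unaffected.
const all = await client.listCollections({ limit: 100, offset: 0 });
const named = await client.listCollections({ name: 'support-docs' });
```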
@@ -208,110 +211,6 @@ var Nebula = class {
     }
     return collections.map((collection) => this._collectionFromDict(collection));
   }
-  // Conversations Methods
-  /**
-   * List conversations for the authenticated user with optional metadata filtering
-   *
-   * @param options - Configuration for listing conversations
-   * @param options.limit - Maximum number of conversations to return (default: 100)
-   * @param options.offset - Number of conversations to skip for pagination (default: 0)
-   * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-   * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-   *                                   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-   *
-   * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-   *
-   * @example
-   * // Get all playground conversations
-   * const conversations = await client.listConversations({
-   *   collection_ids: ['collection-id'],
-   *   metadata_filters: {
-   *     'metadata.playground': { $eq: true }
-   *   }
-   * });
-   *
-   * @example
-   * // Filter by session ID
-   * const conversations = await client.listConversations({
-   *   metadata_filters: {
-   *     'metadata.session_id': { $eq: 'session-123' }
-   *   }
-   * });
-   */
-  async listConversations(options) {
-    const params = {
-      limit: options?.limit ?? 100,
-      offset: options?.offset ?? 0
-    };
-    if (options?.collection_ids && options.collection_ids.length > 0) {
-      params.collection_ids = options.collection_ids;
-    }
-    if (options?.metadata_filters) {
-      params.metadata_filters = JSON.stringify(options.metadata_filters);
-    }
-    const response = await this._makeRequest("GET", "/v1/memories", void 0, params);
-    let conversations;
-    if (response && response.results) {
-      conversations = response.results;
-    } else if (Array.isArray(response)) {
-      conversations = response;
-    } else {
-      conversations = response ? [response] : [];
-    }
-    return conversations;
-  }
-  async getConversationMessages(conversationIdOrIds) {
-    if (typeof conversationIdOrIds === "string") {
-      const batchResults = await this.getConversationMessages([conversationIdOrIds]);
-      return batchResults[conversationIdOrIds] || [];
-    }
-    if (!Array.isArray(conversationIdOrIds) || conversationIdOrIds.length === 0) {
-      return {};
-    }
-    const params = { ids: conversationIdOrIds };
-    const response = await this._makeRequest("GET", "/v1/memories", void 0, params);
-    const results = {};
-    if (response && response.results && Array.isArray(response.results)) {
-      for (const doc of response.results) {
-        const conversationId = doc.id;
-        if (!conversationId) {
-          continue;
-        }
-        if (Array.isArray(doc.chunks) && doc.chunks.length > 0) {
-          const messages = [];
-          for (let i = 0; i < doc.chunks.length; i++) {
-            const structuredChunk = doc.chunks[i];
-            if (!structuredChunk || typeof structuredChunk.text !== "string" || structuredChunk.text.length === 0) {
-              continue;
-            }
-            const text = structuredChunk.text;
-            const role = structuredChunk.role ?? "user";
-            messages.push({
-              id: `${doc.id}-${i}`,
-              content: text,
-              metadata: {
-                ...doc.metadata,
-                // Copy engram metadata (playground, session_id, etc.)
-                role
-                // Add/override role for this specific message
-              },
-              created_at: doc.created_at,
-              collection_ids: doc.collection_ids || []
-            });
-          }
-          results[conversationId] = messages;
-        } else {
-          results[conversationId] = [];
-        }
-      }
-    }
-    for (const conversationId of conversationIdOrIds) {
-      if (!(conversationId in results)) {
-        results[conversationId] = [];
-      }
-    }
-    return results;
-  }
   /** Update a collection */
   async updateCollection(options) {
     const data = {};
@@ -409,7 +308,7 @@ var Nebula = class {
     }
     const data2 = {
       engram_type: "conversation",
-
+      collection_id: mem.collection_id,
       name: name || "Conversation",
       messages,
       metadata: mem.metadata || {}
@@ -533,7 +432,7 @@ var Nebula = class {
     if (key.startsWith("__new__::")) {
       const data = {
         engram_type: "conversation",
-
+        collection_id: collectionId,
         name: "Conversation",
         messages,
         metadata: {}
@@ -674,18 +573,6 @@ var Nebula = class {
       throw error;
     }
   }
-  /** Delete a conversation and all its messages */
-  async deleteConversation(conversationId) {
-    try {
-      await this._makeRequest("DELETE", `/v1/memories/${conversationId}`);
-      return true;
-    } catch (error) {
-      if (error instanceof Error) {
-        throw error;
-      }
-      throw new NebulaClientException(`Unknown error: ${String(error)}`);
-    }
-  }
   /**
    * Get all memories from specific collections with optional metadata filtering
    *
@@ -855,29 +742,24 @@ var Nebula = class {
    * https://docs.nebulacloud.app/guides/metadata-filtering
    */
   async search(options) {
-    const collectionIds = Array.isArray(options.collection_ids) ? options.collection_ids : [options.collection_ids];
-    const validCollectionIds = collectionIds.filter((id) => id && id.trim() !== "");
-    if (!validCollectionIds.length) {
-      throw new NebulaClientException("collection_ids must be provided to search().");
-    }
-    const limit = options.limit ?? 10;
-    const searchMode = options.search_mode ?? "super";
-    const effectiveSettings = {
-      ...options.searchSettings
-    };
-    effectiveSettings.limit = limit;
-    const userFilters = { ...effectiveSettings.filters };
-    if (options.filters) {
-      Object.assign(userFilters, options.filters);
-    }
-    userFilters.collection_ids = { $overlap: validCollectionIds };
-    effectiveSettings.filters = userFilters;
     const data = {
       query: options.query,
-
-      search_settings: effectiveSettings
+      limit: options.limit ?? 10
     };
-
+    if (options.collection_ids) {
+      const collectionIds = Array.isArray(options.collection_ids) ? options.collection_ids : [options.collection_ids];
+      const validCollectionIds = collectionIds.filter((id) => id && id.trim() !== "");
+      if (validCollectionIds.length) {
+        data.collection_ids = validCollectionIds;
+      }
+    }
+    if (options.filters) {
+      data.filters = options.filters;
+    }
+    if (options.searchSettings) {
+      data.search_settings = options.searchSettings;
+    }
+    const response = await this._makeRequest("POST", "/v1/memories/search", data);
     const memoryRecall = response.results || {
       query: options.query,
       entities: [],
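With the rewritten body above, `search()` no longer requires collection IDs, no longer injects an `$overlap` filter into `search_settings`, and only attaches `collection_ids`, `filters`, and `search_settings` when the caller provides them. An illustrative sketch of the request body now POSTed to `/v1/memories/search` for a typical call; the values are placeholders:

```ts
// For search({ query: 'q', collection_ids: 'col-1', filters: { topic: 'ml' } }),
// the 1.1.10 client builds roughly this payload:
const data = {
  query: 'q',
  limit: 10,                 // options.limit ?? 10
  collection_ids: ['col-1'], // normalized to an array; empty/blank IDs are dropped
  filters: { topic: 'ml' },  // passed through as-is, no $overlap injection
  // search_settings is included only when options.searchSettings is supplied
};
```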
@@ -889,31 +771,6 @@ var Nebula = class {
     };
     return memoryRecall;
   }
-  /**
-   * Legacy wrapper: store a two-message conversation turn as a document
-   */
-  async storeConversation(userMessage, assistantMessage, collectionId, sessionId) {
-    const content = `User: ${String(userMessage || "")}
-Assistant: ${String(assistantMessage || "")}`;
-    const metadata = { session_id: sessionId, content_type: "conversation" };
-    return this.store(content, collectionId, metadata);
-  }
-  /**
-   * Legacy wrapper: search conversations optionally scoped by session
-   * Now returns MemoryRecall with hierarchical memory structure
-   */
-  async searchConversations(query, collectionId, sessionId, includeAllSessions = true) {
-    const filters = { "metadata.content_type": "conversation" };
-    if (sessionId && !includeAllSessions) {
-      filters["metadata.session_id"] = sessionId;
-    }
-    return this.search({
-      query,
-      collection_ids: [collectionId],
-      limit: 10,
-      filters
-    });
-  }
   // Health Check
   async healthCheck() {
     return this._makeRequest("GET", "/v1/health");
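The legacy `storeConversation` and `searchConversations` wrappers removed above have no direct replacement in 1.1.10. A hedged equivalent built from the remaining APIs, following the README's conversation-memory pattern; the client, collection ID, and session ID are placeholders, and whether the server still treats `session_id` specially is an assumption carried over from the removed code:

```ts
import { Nebula } from '@nebula-ai/sdk';

declare const client: Nebula; // assumption: an already-configured client
const collectionId = 'collection-id'; // placeholder
const sessionId = 'session-123';      // placeholder

// storeConversation(user, assistant, ...) stored one concatenated document;
// the README pattern stores the turn as two linked conversation memories instead.
const turnId = await client.storeMemory({
  collection_id: collectionId,
  content: 'What is machine learning?',
  role: 'user',
  metadata: { content_type: 'conversation', session_id: sessionId },
});
await client.storeMemory({
  collection_id: collectionId,
  content: 'Machine learning is a subset of AI...',
  role: 'assistant',
  parent_id: turnId,
  metadata: { content_type: 'conversation', session_id: sessionId },
});

// searchConversations(query, collectionId, sessionId, false) maps to search() with
// the same metadata filters the removed wrapper used.
const recall = await client.search({
  query: 'machine learning',
  collection_ids: [collectionId],
  limit: 10,
  filters: { 'metadata.content_type': 'conversation', 'metadata.session_id': sessionId },
});
```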