@nebula-ai/sdk 1.1.4 → 1.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -7
- package/dist/index.d.mts +53 -63
- package/dist/index.d.ts +53 -63
- package/dist/index.js +22 -169
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +22 -169
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED

@@ -137,23 +137,27 @@ const result = await client.delete(['id1', 'id2', 'id3']); // Returns detailed r
 const conversationId = await client.storeMemory({
   collection_id: collection.id,
   content: 'What is machine learning?',
-  role: 'user'
+  role: 'user',
+  metadata: { content_type: 'conversation' }
 });

 await client.storeMemory({
   collection_id: collection.id,
   content: 'Machine learning is a subset of AI...',
   role: 'assistant',
-  parent_id: conversationId
+  parent_id: conversationId,
+  metadata: { content_type: 'conversation' }
 });

-// List
-const conversations = await client.
-  collection_ids: [collection.id]
+// List conversation memories
+const conversations = await client.listMemories({
+  collection_ids: [collection.id],
+  metadata_filters: { 'metadata.content_type': { $eq: 'conversation' } }
 });

-// Get messages
-const
+// Get messages from a conversation memory
+const conversation = await client.getMemory(conversationId);
+const messages = conversation.chunks ?? [];
 ```

 ## Error Handling
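Resolved, the updated README hunk walks through the new conversation-memory flow end to end. The sketch below is not part of the diff itself: the import, client construction, and `collection` value are assumptions carried over from earlier, unchanged README sections, while the storeMemory/listMemories/getMemory calls mirror the added lines above.

```typescript
// Sketch of the README flow after this change; client setup and `collection`
// are placeholders (assumed), not part of this diff.
import Nebula from '@nebula-ai/sdk';

const client = new Nebula({ apiKey: 'YOUR_API_KEY' });
const collection = { id: 'YOUR_COLLECTION_ID' }; // collection created earlier in the README (assumed)

// Store a conversation turn, now tagged via metadata.content_type.
const conversationId = await client.storeMemory({
  collection_id: collection.id,
  content: 'What is machine learning?',
  role: 'user',
  metadata: { content_type: 'conversation' }
});

await client.storeMemory({
  collection_id: collection.id,
  content: 'Machine learning is a subset of AI...',
  role: 'assistant',
  parent_id: conversationId,
  metadata: { content_type: 'conversation' }
});

// Conversations are now listed as memories filtered by metadata,
// in place of the removed listConversations() helper.
const conversations = await client.listMemories({
  collection_ids: [collection.id],
  metadata_filters: { 'metadata.content_type': { $eq: 'conversation' } }
});

// Messages come back as chunks on a single memory,
// in place of the removed getConversationMessages() helper.
const conversation = await client.getMemory(conversationId);
const messages = conversation.chunks ?? [];
```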
package/dist/index.d.mts
CHANGED

@@ -100,6 +100,56 @@ interface SearchOptions {
     filters?: Record<string, any>;
     search_mode?: 'fast' | 'super';
 }
+interface RecallFocus {
+    schema_weight: number;
+    fact_weight: number;
+    episodic_weight: number;
+}
+interface ActivatedEntity {
+    entity_id: string;
+    entity_name: string;
+    entity_category?: string;
+    activation_score: number;
+    activation_reason?: string;
+    traversal_depth: number;
+    profile?: Record<string, any>;
+}
+interface ActivatedFact {
+    fact_id: string;
+    entity_id?: string;
+    entity_name?: string;
+    facet_name?: string;
+    subject: string;
+    predicate: string;
+    object_value: string;
+    activation_score: number;
+    extraction_confidence: number;
+    corroboration_count: number;
+    source_chunk_ids: string[];
+}
+interface GroundedUtterance {
+    chunk_id: string;
+    text: string;
+    activation_score: number;
+    speaker_name?: string;
+    source_role?: string;
+    timestamp?: string;
+    display_name?: string;
+    supporting_fact_ids: string[];
+    metadata?: Record<string, any>;
+}
+interface MemoryRecall {
+    query: string;
+    entities: ActivatedEntity[];
+    facts: ActivatedFact[];
+    utterances: GroundedUtterance[];
+    focus?: RecallFocus;
+    fact_to_chunks: Record<string, string[]>;
+    entity_to_facts: Record<string, string[]>;
+    retrieved_at: string;
+    total_traversal_time_ms?: number;
+    query_intent?: string;
+}
 interface NebulaClientConfig {
     apiKey: string;
     baseUrl?: string;

@@ -166,55 +216,6 @@ declare class Nebula {
         limit?: number;
         offset?: number;
     }): Promise<Collection[]>;
-    /**
-     * List conversations for the authenticated user with optional metadata filtering
-     *
-     * @param options - Configuration for listing conversations
-     * @param options.limit - Maximum number of conversations to return (default: 100)
-     * @param options.offset - Number of conversations to skip for pagination (default: 0)
-     * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-     * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-     *   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-     *
-     * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-     *
-     * @example
-     * // Get all playground conversations
-     * const conversations = await client.listConversations({
-     *   collection_ids: ['collection-id'],
-     *   metadata_filters: {
-     *     'metadata.playground': { $eq: true }
-     *   }
-     * });
-     *
-     * @example
-     * // Filter by session ID
-     * const conversations = await client.listConversations({
-     *   metadata_filters: {
-     *     'metadata.session_id': { $eq: 'session-123' }
-     *   }
-     * });
-     */
-    listConversations(options?: {
-        limit?: number;
-        offset?: number;
-        collection_ids?: string[];
-        metadata_filters?: Record<string, any>;
-    }): Promise<any[]>;
-    /**
-     * Get conversation messages from the engrams API.
-     *
-     * This method retrieves conversation engrams and parses their chunks into structured messages.
-     * Expects conversation engrams to contain structured chunks with role metadata:
-     * `{text: string, role: 'user'|'assistant'|'system'}`.
-     * Converts chunks to `MemoryResponse` objects with proper role metadata.
-     *
-     * @param conversationId - Single conversation ID (returns array of messages)
-     * @param conversationIds - Multiple conversation IDs (returns map of conversation_id -> messages)
-     * @returns Messages for the requested conversation(s)
-     */
-    getConversationMessages(conversationId: string): Promise<MemoryResponse[]>;
-    getConversationMessages(conversationIds: string[]): Promise<Record<string, MemoryResponse[]>>;
     /** Update a collection */
     updateCollection(options: {
         collectionId: string;

@@ -293,8 +294,6 @@ declare class Nebula {
         collectionIds?: string[];
         mergeMetadata?: boolean;
     }): Promise<boolean>;
-    /** Delete a conversation and all its messages */
-    deleteConversation(conversationId: string): Promise<boolean>;
     /**
     * Get all memories from specific collections with optional metadata filtering
    *

@@ -434,20 +433,11 @@ declare class Nebula {
    */
    search(options: {
        query: string;
-        collection_ids
+        collection_ids?: string | string[];
        limit?: number;
        filters?: Record<string, any>;
-        search_mode?: 'fast' | 'super';
        searchSettings?: Record<string, any>;
-    }): Promise<
-    /**
-     * Legacy wrapper: store a two-message conversation turn as a document
-     */
-    storeConversation(userMessage: string, assistantMessage: string, collectionId: string, sessionId: string): Promise<MemoryResponse>;
-    /**
-     * Legacy wrapper: search conversations optionally scoped by session
-     */
-    searchConversations(query: string, collectionId: string, sessionId?: string, includeAllSessions?: boolean): Promise<SearchResult[]>;
+    }): Promise<MemoryRecall>;
    healthCheck(): Promise<Record<string, any>>;
    private _collectionFromDict;
    private _memoryResponseFromDict;

@@ -457,4 +447,4 @@ declare class Nebula {
    private _formDataFromObject;
}

-export { type AgentResponse, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type Memory, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
+export { type ActivatedEntity, type ActivatedFact, type AgentResponse, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type GroundedUtterance, type Memory, type MemoryRecall, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type RecallFocus, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
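The net effect of these declaration changes: the conversation-specific surface (`listConversations`, `getConversationMessages`, `deleteConversation`, the legacy wrappers) is gone, and `search()` now resolves to a structured `MemoryRecall` rather than a flat result list. A minimal consumption sketch, using only fields declared above; the API key and collection id are placeholders:

```typescript
import Nebula, { type MemoryRecall } from '@nebula-ai/sdk';

const client = new Nebula({ apiKey: 'YOUR_API_KEY' });

// search() now returns a MemoryRecall: entities, facts, and utterances,
// cross-linked through the fact_to_chunks and entity_to_facts maps.
const recall: MemoryRecall = await client.search({
  query: 'What did we decide about caching?',
  collection_ids: ['YOUR_COLLECTION_ID'],
  limit: 10
});

for (const entity of recall.entities) {
  const factIds = recall.entity_to_facts[entity.entity_id] ?? [];
  console.log(entity.entity_name, entity.activation_score, factIds.length);
}

for (const fact of recall.facts) {
  const chunkIds = recall.fact_to_chunks[fact.fact_id] ?? [];
  console.log(`${fact.subject} ${fact.predicate} ${fact.object_value}`, chunkIds);
}

for (const utterance of recall.utterances) {
  console.log(utterance.speaker_name ?? utterance.source_role ?? 'unknown', utterance.text);
}
```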
package/dist/index.d.ts
CHANGED

@@ -100,6 +100,56 @@ interface SearchOptions {
     filters?: Record<string, any>;
     search_mode?: 'fast' | 'super';
 }
+interface RecallFocus {
+    schema_weight: number;
+    fact_weight: number;
+    episodic_weight: number;
+}
+interface ActivatedEntity {
+    entity_id: string;
+    entity_name: string;
+    entity_category?: string;
+    activation_score: number;
+    activation_reason?: string;
+    traversal_depth: number;
+    profile?: Record<string, any>;
+}
+interface ActivatedFact {
+    fact_id: string;
+    entity_id?: string;
+    entity_name?: string;
+    facet_name?: string;
+    subject: string;
+    predicate: string;
+    object_value: string;
+    activation_score: number;
+    extraction_confidence: number;
+    corroboration_count: number;
+    source_chunk_ids: string[];
+}
+interface GroundedUtterance {
+    chunk_id: string;
+    text: string;
+    activation_score: number;
+    speaker_name?: string;
+    source_role?: string;
+    timestamp?: string;
+    display_name?: string;
+    supporting_fact_ids: string[];
+    metadata?: Record<string, any>;
+}
+interface MemoryRecall {
+    query: string;
+    entities: ActivatedEntity[];
+    facts: ActivatedFact[];
+    utterances: GroundedUtterance[];
+    focus?: RecallFocus;
+    fact_to_chunks: Record<string, string[]>;
+    entity_to_facts: Record<string, string[]>;
+    retrieved_at: string;
+    total_traversal_time_ms?: number;
+    query_intent?: string;
+}
 interface NebulaClientConfig {
     apiKey: string;
     baseUrl?: string;

@@ -166,55 +216,6 @@ declare class Nebula {
         limit?: number;
         offset?: number;
     }): Promise<Collection[]>;
-    /**
-     * List conversations for the authenticated user with optional metadata filtering
-     *
-     * @param options - Configuration for listing conversations
-     * @param options.limit - Maximum number of conversations to return (default: 100)
-     * @param options.offset - Number of conversations to skip for pagination (default: 0)
-     * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-     * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-     *   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-     *
-     * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-     *
-     * @example
-     * // Get all playground conversations
-     * const conversations = await client.listConversations({
-     *   collection_ids: ['collection-id'],
-     *   metadata_filters: {
-     *     'metadata.playground': { $eq: true }
-     *   }
-     * });
-     *
-     * @example
-     * // Filter by session ID
-     * const conversations = await client.listConversations({
-     *   metadata_filters: {
-     *     'metadata.session_id': { $eq: 'session-123' }
-     *   }
-     * });
-     */
-    listConversations(options?: {
-        limit?: number;
-        offset?: number;
-        collection_ids?: string[];
-        metadata_filters?: Record<string, any>;
-    }): Promise<any[]>;
-    /**
-     * Get conversation messages from the engrams API.
-     *
-     * This method retrieves conversation engrams and parses their chunks into structured messages.
-     * Expects conversation engrams to contain structured chunks with role metadata:
-     * `{text: string, role: 'user'|'assistant'|'system'}`.
-     * Converts chunks to `MemoryResponse` objects with proper role metadata.
-     *
-     * @param conversationId - Single conversation ID (returns array of messages)
-     * @param conversationIds - Multiple conversation IDs (returns map of conversation_id -> messages)
-     * @returns Messages for the requested conversation(s)
-     */
-    getConversationMessages(conversationId: string): Promise<MemoryResponse[]>;
-    getConversationMessages(conversationIds: string[]): Promise<Record<string, MemoryResponse[]>>;
     /** Update a collection */
     updateCollection(options: {
         collectionId: string;

@@ -293,8 +294,6 @@ declare class Nebula {
         collectionIds?: string[];
         mergeMetadata?: boolean;
     }): Promise<boolean>;
-    /** Delete a conversation and all its messages */
-    deleteConversation(conversationId: string): Promise<boolean>;
     /**
     * Get all memories from specific collections with optional metadata filtering
    *

@@ -434,20 +433,11 @@ declare class Nebula {
    */
    search(options: {
        query: string;
-        collection_ids
+        collection_ids?: string | string[];
        limit?: number;
        filters?: Record<string, any>;
-        search_mode?: 'fast' | 'super';
        searchSettings?: Record<string, any>;
-    }): Promise<
-    /**
-     * Legacy wrapper: store a two-message conversation turn as a document
-     */
-    storeConversation(userMessage: string, assistantMessage: string, collectionId: string, sessionId: string): Promise<MemoryResponse>;
-    /**
-     * Legacy wrapper: search conversations optionally scoped by session
-     */
-    searchConversations(query: string, collectionId: string, sessionId?: string, includeAllSessions?: boolean): Promise<SearchResult[]>;
+    }): Promise<MemoryRecall>;
    healthCheck(): Promise<Record<string, any>>;
    private _collectionFromDict;
    private _memoryResponseFromDict;

@@ -457,4 +447,4 @@ declare class Nebula {
    private _formDataFromObject;
}

-export { type AgentResponse, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type Memory, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
+export { type ActivatedEntity, type ActivatedFact, type AgentResponse, type Chunk, type Collection, type GraphCommunityResult, type GraphEntityResult, type GraphRelationshipResult, GraphSearchResultType, type GroundedUtterance, type Memory, type MemoryRecall, type MemoryResponse, Nebula, NebulaAuthenticationException, type NebulaClientConfig, NebulaClientException, NebulaCollectionNotFoundException, NebulaException, NebulaNotFoundException, NebulaRateLimitException, NebulaValidationException, type RecallFocus, type SearchOptions, type SearchResult, type StructuredChunk, Nebula as default };
package/dist/index.js
CHANGED

@@ -208,110 +208,6 @@ var Nebula = class {
     }
     return collections.map((collection) => this._collectionFromDict(collection));
   }
-  // Conversations Methods
-  /**
-   * List conversations for the authenticated user with optional metadata filtering
-   *
-   * @param options - Configuration for listing conversations
-   * @param options.limit - Maximum number of conversations to return (default: 100)
-   * @param options.offset - Number of conversations to skip for pagination (default: 0)
-   * @param options.collection_ids - Optional list of collection IDs to filter conversations by
-   * @param options.metadata_filters - Optional metadata filters using MongoDB-like operators.
-   *   Supported operators: $eq, $ne, $in, $nin, $exists, $and, $or
-   *
-   * @returns Promise resolving to array of conversation objects with fields: id, created_at, user_id, name, collection_ids
-   *
-   * @example
-   * // Get all playground conversations
-   * const conversations = await client.listConversations({
-   *   collection_ids: ['collection-id'],
-   *   metadata_filters: {
-   *     'metadata.playground': { $eq: true }
-   *   }
-   * });
-   *
-   * @example
-   * // Filter by session ID
-   * const conversations = await client.listConversations({
-   *   metadata_filters: {
-   *     'metadata.session_id': { $eq: 'session-123' }
-   *   }
-   * });
-   */
-  async listConversations(options) {
-    const params = {
-      limit: options?.limit ?? 100,
-      offset: options?.offset ?? 0
-    };
-    if (options?.collection_ids && options.collection_ids.length > 0) {
-      params.collection_ids = options.collection_ids;
-    }
-    if (options?.metadata_filters) {
-      params.metadata_filters = JSON.stringify(options.metadata_filters);
-    }
-    const response = await this._makeRequest("GET", "/v1/memories", void 0, params);
-    let conversations;
-    if (response && response.results) {
-      conversations = response.results;
-    } else if (Array.isArray(response)) {
-      conversations = response;
-    } else {
-      conversations = response ? [response] : [];
-    }
-    return conversations;
-  }
-  async getConversationMessages(conversationIdOrIds) {
-    if (typeof conversationIdOrIds === "string") {
-      const batchResults = await this.getConversationMessages([conversationIdOrIds]);
-      return batchResults[conversationIdOrIds] || [];
-    }
-    if (!Array.isArray(conversationIdOrIds) || conversationIdOrIds.length === 0) {
-      return {};
-    }
-    const params = { ids: conversationIdOrIds };
-    const response = await this._makeRequest("GET", "/v1/memories", void 0, params);
-    const results = {};
-    if (response && response.results && Array.isArray(response.results)) {
-      for (const doc of response.results) {
-        const conversationId = doc.id;
-        if (!conversationId) {
-          continue;
-        }
-        if (Array.isArray(doc.chunks) && doc.chunks.length > 0) {
-          const messages = [];
-          for (let i = 0; i < doc.chunks.length; i++) {
-            const structuredChunk = doc.chunks[i];
-            if (!structuredChunk || typeof structuredChunk.text !== "string" || structuredChunk.text.length === 0) {
-              continue;
-            }
-            const text = structuredChunk.text;
-            const role = structuredChunk.role ?? "user";
-            messages.push({
-              id: `${doc.id}-${i}`,
-              content: text,
-              metadata: {
-                ...doc.metadata,
-                // Copy engram metadata (playground, session_id, etc.)
-                role
-                // Add/override role for this specific message
-              },
-              created_at: doc.created_at,
-              collection_ids: doc.collection_ids || []
-            });
-          }
-          results[conversationId] = messages;
-        } else {
-          results[conversationId] = [];
-        }
-      }
-    }
-    for (const conversationId of conversationIdOrIds) {
-      if (!(conversationId in results)) {
-        results[conversationId] = [];
-      }
-    }
-    return results;
-  }
   /** Update a collection */
   async updateCollection(options) {
     const data = {};

@@ -674,18 +570,6 @@ var Nebula = class {
       throw error;
     }
   }
-  /** Delete a conversation and all its messages */
-  async deleteConversation(conversationId) {
-    try {
-      await this._makeRequest("DELETE", `/v1/memories/${conversationId}`);
-      return true;
-    } catch (error) {
-      if (error instanceof Error) {
-        throw error;
-      }
-      throw new NebulaClientException(`Unknown error: ${String(error)}`);
-    }
-  }
   /**
   * Get all memories from specific collections with optional metadata filtering
   *

@@ -855,65 +739,34 @@ var Nebula = class {
   * https://docs.nebulacloud.app/guides/metadata-filtering
   */
  async search(options) {
-    const collectionIds = Array.isArray(options.collection_ids) ? options.collection_ids : [options.collection_ids];
-    const validCollectionIds = collectionIds.filter((id) => id && id.trim() !== "");
-    if (!validCollectionIds.length) {
-      throw new NebulaClientException("collection_ids must be provided to search().");
-    }
-    const limit = options.limit ?? 10;
-    const searchMode = options.search_mode ?? "super";
-    const effectiveSettings = {
-      ...options.searchSettings
-    };
-    effectiveSettings.limit = limit;
-    const userFilters = { ...effectiveSettings.filters };
-    if (options.filters) {
-      Object.assign(userFilters, options.filters);
-    }
-    userFilters.collection_ids = { $overlap: validCollectionIds };
-    effectiveSettings.filters = userFilters;
     const data = {
       query: options.query,
-
-      search_settings: effectiveSettings
+      limit: options.limit ?? 10
     };
-
-
-
-
-
-
+    if (options.collection_ids) {
+      const collectionIds = Array.isArray(options.collection_ids) ? options.collection_ids : [options.collection_ids];
+      const validCollectionIds = collectionIds.filter((id) => id && id.trim() !== "");
+      if (validCollectionIds.length) {
+        data.collection_ids = validCollectionIds;
+      }
    }
-
-
-    for (const g of graphResults) {
-      out.push(this._searchResultFromGraphDict(g));
+    if (options.filters) {
+      data.filters = options.filters;
    }
-
-
-    /**
-     * Legacy wrapper: store a two-message conversation turn as a document
-     */
-    async storeConversation(userMessage, assistantMessage, collectionId, sessionId) {
-      const content = `User: ${String(userMessage || "")}
-Assistant: ${String(assistantMessage || "")}`;
-      const metadata = { session_id: sessionId, content_type: "conversation" };
-      return this.store(content, collectionId, metadata);
-    }
-    /**
-     * Legacy wrapper: search conversations optionally scoped by session
-     */
-    async searchConversations(query, collectionId, sessionId, includeAllSessions = true) {
-      const filters = { "metadata.content_type": "conversation" };
-      if (sessionId && !includeAllSessions) {
-        filters["metadata.session_id"] = sessionId;
+    if (options.searchSettings) {
+      data.search_settings = options.searchSettings;
    }
-
-
-
-
-
-
+    const response = await this._makeRequest("POST", "/v1/retrieval/search", data);
+    const memoryRecall = response.results || {
+      query: options.query,
+      entities: [],
+      facts: [],
+      utterances: [],
+      fact_to_chunks: {},
+      entity_to_facts: {},
+      retrieved_at: (/* @__PURE__ */ new Date()).toISOString()
+    };
+    return memoryRecall;
  }
  // Health Check
  async healthCheck() {