@robosystems/client 0.1.17 → 0.1.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/create-feature +91 -0
- package/extensions/CopyClient.d.ts +97 -0
- package/extensions/CopyClient.js +287 -0
- package/extensions/CopyClient.ts +438 -0
- package/extensions/hooks.d.ts +35 -0
- package/extensions/hooks.js +123 -0
- package/extensions/hooks.ts +139 -0
- package/extensions/index.d.ts +7 -2
- package/extensions/index.js +15 -1
- package/extensions/index.ts +23 -1
- package/package.json +7 -1
- package/sdk/sdk.gen.d.ts +36 -1
- package/sdk/sdk.gen.js +36 -1
- package/sdk/sdk.gen.ts +36 -1
- package/sdk/types.gen.d.ts +45 -4
- package/sdk/types.gen.ts +45 -4
- package/sdk-extensions/CopyClient.d.ts +97 -0
- package/sdk-extensions/CopyClient.js +287 -0
- package/sdk-extensions/CopyClient.ts +438 -0
- package/sdk-extensions/README.md +219 -0
- package/sdk-extensions/hooks.d.ts +35 -0
- package/sdk-extensions/hooks.js +123 -0
- package/sdk-extensions/hooks.ts +139 -0
- package/sdk-extensions/index.d.ts +7 -2
- package/sdk-extensions/index.js +15 -1
- package/sdk-extensions/index.ts +23 -1
- package/sdk.gen.d.ts +36 -1
- package/sdk.gen.js +36 -1
- package/sdk.gen.ts +36 -1
- package/types.gen.d.ts +45 -4
- package/types.gen.ts +45 -4
|
@@ -0,0 +1,438 @@
|
|
|
1
|
+
'use client'
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Enhanced Copy Client with SSE support
|
|
5
|
+
* Provides intelligent data copy operations with progress monitoring
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { copyDataToGraph } from '../sdk/sdk.gen'
|
|
9
|
+
import type {
|
|
10
|
+
CopyDataToGraphData,
|
|
11
|
+
CopyResponse,
|
|
12
|
+
DataFrameCopyRequest,
|
|
13
|
+
S3CopyRequest,
|
|
14
|
+
UrlCopyRequest,
|
|
15
|
+
} from '../sdk/types.gen'
|
|
16
|
+
import { EventType, SSEClient } from './SSEClient'
|
|
17
|
+
|
|
18
|
+
/** Supported data sources for copy operations. */
export type CopySourceType = 's3' | 'url' | 'dataframe'

/** Optional callbacks and tuning knobs for a copy operation. */
export interface CopyOptions {
  /** Invoked on each progress update; percent may be absent for some events. */
  onProgress?: (message: string, progressPercent?: number) => void
  /** Invoked when the operation's queue position changes (wait is in seconds). */
  onQueueUpdate?: (position: number, estimatedWait: number) => void
  /** Invoked once per warning emitted while the copy runs. */
  onWarning?: (warning: string) => void
  /** Overall timeout in milliseconds for SSE-monitored operations. */
  timeout?: number
  /** Reserved flag for test runs. NOTE(review): not read anywhere in this file — confirm intended use. */
  testMode?: boolean
}

/** Normalized (camelCase) outcome of a copy operation. */
export interface CopyResult {
  /** 'accepted' means the server is processing asynchronously; see operationId/sseUrl. */
  status: 'completed' | 'failed' | 'partial' | 'accepted'
  rowsImported?: number
  rowsSkipped?: number
  bytesProcessed?: number
  executionTimeMs?: number
  warnings?: string[]
  error?: string
  /** Set when status is 'accepted'; used to monitor the async operation. */
  operationId?: string
  /** Server-provided SSE endpoint for monitoring, when available. */
  sseUrl?: string
  message?: string
}

/** Derived performance metrics for a finished copy (see calculateStatistics). */
export interface CopyStatistics {
  totalRows: number
  importedRows: number
  skippedRows: number
  bytesProcessed: number
  /** Wall-clock duration in seconds. */
  duration: number
  throughput: number // rows per second
}
|
|
49
|
+
|
|
50
|
+
export class CopyClient {
|
|
51
|
+
private sseClient?: SSEClient
|
|
52
|
+
private config: {
|
|
53
|
+
baseUrl: string
|
|
54
|
+
credentials?: 'include' | 'same-origin' | 'omit'
|
|
55
|
+
headers?: Record<string, string>
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
constructor(config: {
|
|
59
|
+
baseUrl: string
|
|
60
|
+
credentials?: 'include' | 'same-origin' | 'omit'
|
|
61
|
+
headers?: Record<string, string>
|
|
62
|
+
}) {
|
|
63
|
+
this.config = config
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Copy data from S3 to graph database
|
|
68
|
+
*/
|
|
69
|
+
async copyFromS3(
|
|
70
|
+
graphId: string,
|
|
71
|
+
request: S3CopyRequest,
|
|
72
|
+
options: CopyOptions = {}
|
|
73
|
+
): Promise<CopyResult> {
|
|
74
|
+
return this.executeCopy(graphId, request, 's3', options)
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Copy data from URL to graph database (when available)
|
|
79
|
+
*/
|
|
80
|
+
async copyFromUrl(
|
|
81
|
+
graphId: string,
|
|
82
|
+
request: UrlCopyRequest,
|
|
83
|
+
options: CopyOptions = {}
|
|
84
|
+
): Promise<CopyResult> {
|
|
85
|
+
return this.executeCopy(graphId, request, 'url', options)
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
|
|
89
|
+
* Copy data from DataFrame to graph database (when available)
|
|
90
|
+
*/
|
|
91
|
+
async copyFromDataFrame(
|
|
92
|
+
graphId: string,
|
|
93
|
+
request: DataFrameCopyRequest,
|
|
94
|
+
options: CopyOptions = {}
|
|
95
|
+
): Promise<CopyResult> {
|
|
96
|
+
return this.executeCopy(graphId, request, 'dataframe', options)
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Execute copy operation with automatic SSE monitoring for long-running operations
|
|
101
|
+
*/
|
|
102
|
+
private async executeCopy(
|
|
103
|
+
graphId: string,
|
|
104
|
+
request: S3CopyRequest | UrlCopyRequest | DataFrameCopyRequest,
|
|
105
|
+
_sourceType: CopySourceType,
|
|
106
|
+
options: CopyOptions = {}
|
|
107
|
+
): Promise<CopyResult> {
|
|
108
|
+
const startTime = Date.now()
|
|
109
|
+
|
|
110
|
+
const data: CopyDataToGraphData = {
|
|
111
|
+
url: '/v1/{graph_id}/copy' as const,
|
|
112
|
+
path: { graph_id: graphId },
|
|
113
|
+
body: request,
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
try {
|
|
117
|
+
// Execute the copy request
|
|
118
|
+
const response = await copyDataToGraph(data)
|
|
119
|
+
const responseData = response.data as CopyResponse
|
|
120
|
+
|
|
121
|
+
// Check if this is an accepted (async) operation
|
|
122
|
+
if (responseData.status === 'accepted' && responseData.operation_id) {
|
|
123
|
+
// This is a long-running operation with SSE monitoring
|
|
124
|
+
options.onProgress?.(`Copy operation started. Monitoring progress...`)
|
|
125
|
+
|
|
126
|
+
// If SSE URL is provided, use it for monitoring
|
|
127
|
+
if (responseData.sse_url) {
|
|
128
|
+
return this.monitorCopyOperation(responseData.operation_id, options, startTime)
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Otherwise return the accepted response
|
|
132
|
+
return {
|
|
133
|
+
status: 'accepted',
|
|
134
|
+
operationId: responseData.operation_id,
|
|
135
|
+
sseUrl: responseData.sse_url,
|
|
136
|
+
message: responseData.message,
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// This is a synchronous response - operation completed immediately
|
|
141
|
+
return this.buildCopyResult(responseData, Date.now() - startTime)
|
|
142
|
+
} catch (error) {
|
|
143
|
+
return {
|
|
144
|
+
status: 'failed',
|
|
145
|
+
error: error instanceof Error ? error.message : String(error),
|
|
146
|
+
executionTimeMs: Date.now() - startTime,
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
/**
|
|
152
|
+
* Monitor a copy operation using SSE
|
|
153
|
+
*/
|
|
154
|
+
private async monitorCopyOperation(
|
|
155
|
+
operationId: string,
|
|
156
|
+
options: CopyOptions,
|
|
157
|
+
startTime: number
|
|
158
|
+
): Promise<CopyResult> {
|
|
159
|
+
return new Promise((resolve, reject) => {
|
|
160
|
+
const sseClient = new SSEClient(this.config)
|
|
161
|
+
const timeoutMs = options.timeout || 3600000 // Default 1 hour for copy operations
|
|
162
|
+
|
|
163
|
+
const timeoutHandle = setTimeout(() => {
|
|
164
|
+
sseClient.close()
|
|
165
|
+
reject(new Error(`Copy operation timeout after ${timeoutMs}ms`))
|
|
166
|
+
}, timeoutMs)
|
|
167
|
+
|
|
168
|
+
sseClient
|
|
169
|
+
.connect(operationId)
|
|
170
|
+
.then(() => {
|
|
171
|
+
let result: CopyResult = { status: 'failed' }
|
|
172
|
+
const warnings: string[] = []
|
|
173
|
+
|
|
174
|
+
// Listen for queue updates
|
|
175
|
+
sseClient.on(EventType.QUEUE_UPDATE, (data) => {
|
|
176
|
+
options.onQueueUpdate?.(
|
|
177
|
+
data.position || data.queue_position,
|
|
178
|
+
data.estimated_wait_seconds || 0
|
|
179
|
+
)
|
|
180
|
+
})
|
|
181
|
+
|
|
182
|
+
// Listen for progress updates
|
|
183
|
+
sseClient.on(EventType.OPERATION_PROGRESS, (data) => {
|
|
184
|
+
const message = data.message || data.status || 'Processing...'
|
|
185
|
+
const progressPercent = data.progress_percent || data.progress
|
|
186
|
+
|
|
187
|
+
options.onProgress?.(message, progressPercent)
|
|
188
|
+
|
|
189
|
+
// Check for warnings in progress updates
|
|
190
|
+
if (data.warnings) {
|
|
191
|
+
warnings.push(...data.warnings)
|
|
192
|
+
data.warnings.forEach((warning: string) => {
|
|
193
|
+
options.onWarning?.(warning)
|
|
194
|
+
})
|
|
195
|
+
}
|
|
196
|
+
})
|
|
197
|
+
|
|
198
|
+
// Listen for completion
|
|
199
|
+
sseClient.on(EventType.OPERATION_COMPLETED, (data) => {
|
|
200
|
+
clearTimeout(timeoutHandle)
|
|
201
|
+
|
|
202
|
+
const completionData = data.result || data
|
|
203
|
+
result = {
|
|
204
|
+
status: completionData.status || 'completed',
|
|
205
|
+
rowsImported: completionData.rows_imported,
|
|
206
|
+
rowsSkipped: completionData.rows_skipped,
|
|
207
|
+
bytesProcessed: completionData.bytes_processed,
|
|
208
|
+
executionTimeMs: Date.now() - startTime,
|
|
209
|
+
warnings: warnings.length > 0 ? warnings : completionData.warnings,
|
|
210
|
+
message: completionData.message,
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
sseClient.close()
|
|
214
|
+
resolve(result)
|
|
215
|
+
})
|
|
216
|
+
|
|
217
|
+
// Listen for errors
|
|
218
|
+
sseClient.on(EventType.OPERATION_ERROR, (error) => {
|
|
219
|
+
clearTimeout(timeoutHandle)
|
|
220
|
+
|
|
221
|
+
result = {
|
|
222
|
+
status: 'failed',
|
|
223
|
+
error: error.message || error.error || 'Copy operation failed',
|
|
224
|
+
executionTimeMs: Date.now() - startTime,
|
|
225
|
+
warnings: warnings.length > 0 ? warnings : undefined,
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
sseClient.close()
|
|
229
|
+
resolve(result) // Resolve with error result, not reject
|
|
230
|
+
})
|
|
231
|
+
|
|
232
|
+
// Listen for cancellation
|
|
233
|
+
sseClient.on(EventType.OPERATION_CANCELLED, () => {
|
|
234
|
+
clearTimeout(timeoutHandle)
|
|
235
|
+
|
|
236
|
+
result = {
|
|
237
|
+
status: 'failed',
|
|
238
|
+
error: 'Copy operation cancelled',
|
|
239
|
+
executionTimeMs: Date.now() - startTime,
|
|
240
|
+
warnings: warnings.length > 0 ? warnings : undefined,
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
sseClient.close()
|
|
244
|
+
resolve(result)
|
|
245
|
+
})
|
|
246
|
+
})
|
|
247
|
+
.catch((error) => {
|
|
248
|
+
clearTimeout(timeoutHandle)
|
|
249
|
+
reject(error)
|
|
250
|
+
})
|
|
251
|
+
})
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
/**
|
|
255
|
+
* Build copy result from response data
|
|
256
|
+
*/
|
|
257
|
+
private buildCopyResult(responseData: CopyResponse, executionTimeMs: number): CopyResult {
|
|
258
|
+
return {
|
|
259
|
+
status: responseData.status,
|
|
260
|
+
rowsImported: responseData.rows_imported || undefined,
|
|
261
|
+
rowsSkipped: responseData.rows_skipped || undefined,
|
|
262
|
+
bytesProcessed: responseData.bytes_processed || undefined,
|
|
263
|
+
executionTimeMs: responseData.execution_time_ms || executionTimeMs,
|
|
264
|
+
warnings: responseData.warnings || undefined,
|
|
265
|
+
message: responseData.message,
|
|
266
|
+
error: responseData.error_details ? String(responseData.error_details) : undefined,
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
/**
|
|
271
|
+
* Calculate copy statistics from result
|
|
272
|
+
*/
|
|
273
|
+
calculateStatistics(result: CopyResult): CopyStatistics | null {
|
|
274
|
+
if (result.status === 'failed' || !result.rowsImported) {
|
|
275
|
+
return null
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
const totalRows = (result.rowsImported || 0) + (result.rowsSkipped || 0)
|
|
279
|
+
const duration = (result.executionTimeMs || 0) / 1000 // Convert to seconds
|
|
280
|
+
const throughput = duration > 0 ? (result.rowsImported || 0) / duration : 0
|
|
281
|
+
|
|
282
|
+
return {
|
|
283
|
+
totalRows,
|
|
284
|
+
importedRows: result.rowsImported || 0,
|
|
285
|
+
skippedRows: result.rowsSkipped || 0,
|
|
286
|
+
bytesProcessed: result.bytesProcessed || 0,
|
|
287
|
+
duration,
|
|
288
|
+
throughput,
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
/**
|
|
293
|
+
* Convenience method for simple S3 copy with default options
|
|
294
|
+
*/
|
|
295
|
+
async copyS3(
|
|
296
|
+
graphId: string,
|
|
297
|
+
tableName: string,
|
|
298
|
+
s3Uri: string,
|
|
299
|
+
accessKeyId: string,
|
|
300
|
+
secretAccessKey: string,
|
|
301
|
+
options?: {
|
|
302
|
+
region?: string
|
|
303
|
+
fileFormat?: 'csv' | 'parquet' | 'json' | 'delta' | 'iceberg'
|
|
304
|
+
ignoreErrors?: boolean
|
|
305
|
+
}
|
|
306
|
+
): Promise<CopyResult> {
|
|
307
|
+
const request: S3CopyRequest = {
|
|
308
|
+
table_name: tableName,
|
|
309
|
+
source_type: 's3',
|
|
310
|
+
s3_path: s3Uri,
|
|
311
|
+
s3_access_key_id: accessKeyId,
|
|
312
|
+
s3_secret_access_key: secretAccessKey,
|
|
313
|
+
s3_region: options?.region || 'us-east-1',
|
|
314
|
+
file_format: options?.fileFormat,
|
|
315
|
+
ignore_errors: options?.ignoreErrors || false,
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
return this.copyFromS3(graphId, request)
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
/**
|
|
322
|
+
* Monitor multiple copy operations concurrently
|
|
323
|
+
*/
|
|
324
|
+
async monitorMultipleCopies(
|
|
325
|
+
operationIds: string[],
|
|
326
|
+
options: CopyOptions = {}
|
|
327
|
+
): Promise<Map<string, CopyResult>> {
|
|
328
|
+
const results = await Promise.all(
|
|
329
|
+
operationIds.map(async (id) => {
|
|
330
|
+
const result = await this.monitorCopyOperation(id, options, Date.now())
|
|
331
|
+
return [id, result] as [string, CopyResult]
|
|
332
|
+
})
|
|
333
|
+
)
|
|
334
|
+
return new Map(results)
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
/**
|
|
338
|
+
* Batch copy multiple tables from S3
|
|
339
|
+
*/
|
|
340
|
+
async batchCopyFromS3(
|
|
341
|
+
graphId: string,
|
|
342
|
+
copies: Array<{
|
|
343
|
+
request: S3CopyRequest
|
|
344
|
+
options?: CopyOptions
|
|
345
|
+
}>
|
|
346
|
+
): Promise<CopyResult[]> {
|
|
347
|
+
return Promise.all(
|
|
348
|
+
copies.map(({ request, options }) => this.copyFromS3(graphId, request, options || {}))
|
|
349
|
+
)
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
/**
|
|
353
|
+
* Copy with retry logic for transient failures
|
|
354
|
+
*/
|
|
355
|
+
async copyWithRetry(
|
|
356
|
+
graphId: string,
|
|
357
|
+
request: S3CopyRequest | UrlCopyRequest | DataFrameCopyRequest,
|
|
358
|
+
sourceType: CopySourceType,
|
|
359
|
+
maxRetries: number = 3,
|
|
360
|
+
options: CopyOptions = {}
|
|
361
|
+
): Promise<CopyResult> {
|
|
362
|
+
let lastError: Error | undefined
|
|
363
|
+
let attempt = 0
|
|
364
|
+
|
|
365
|
+
while (attempt < maxRetries) {
|
|
366
|
+
attempt++
|
|
367
|
+
|
|
368
|
+
try {
|
|
369
|
+
const result = await this.executeCopy(graphId, request, sourceType, options)
|
|
370
|
+
|
|
371
|
+
// If successful or partially successful, return
|
|
372
|
+
if (result.status === 'completed' || result.status === 'partial') {
|
|
373
|
+
return result
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
// If failed, check if it's retryable
|
|
377
|
+
if (result.status === 'failed') {
|
|
378
|
+
const isRetryable = this.isRetryableError(result.error)
|
|
379
|
+
if (!isRetryable || attempt === maxRetries) {
|
|
380
|
+
return result
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
// Wait before retry with exponential backoff
|
|
384
|
+
const waitTime = Math.min(1000 * Math.pow(2, attempt - 1), 30000)
|
|
385
|
+
options.onProgress?.(
|
|
386
|
+
`Retrying copy operation (attempt ${attempt}/${maxRetries}) in ${waitTime}ms...`
|
|
387
|
+
)
|
|
388
|
+
await new Promise((resolve) => setTimeout(resolve, waitTime))
|
|
389
|
+
}
|
|
390
|
+
} catch (error) {
|
|
391
|
+
lastError = error instanceof Error ? error : new Error(String(error))
|
|
392
|
+
|
|
393
|
+
if (attempt === maxRetries) {
|
|
394
|
+
throw lastError
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
// Wait before retry
|
|
398
|
+
const waitTime = Math.min(1000 * Math.pow(2, attempt - 1), 30000)
|
|
399
|
+
options.onProgress?.(
|
|
400
|
+
`Retrying after error (attempt ${attempt}/${maxRetries}) in ${waitTime}ms...`
|
|
401
|
+
)
|
|
402
|
+
await new Promise((resolve) => setTimeout(resolve, waitTime))
|
|
403
|
+
}
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
throw lastError || new Error('Copy operation failed after all retries')
|
|
407
|
+
}
|
|
408
|
+
|
|
409
|
+
/**
|
|
410
|
+
* Check if an error is retryable
|
|
411
|
+
*/
|
|
412
|
+
private isRetryableError(error?: string): boolean {
|
|
413
|
+
if (!error) return false
|
|
414
|
+
|
|
415
|
+
const retryablePatterns = [
|
|
416
|
+
'timeout',
|
|
417
|
+
'network',
|
|
418
|
+
'connection',
|
|
419
|
+
'temporary',
|
|
420
|
+
'unavailable',
|
|
421
|
+
'rate limit',
|
|
422
|
+
'throttl',
|
|
423
|
+
]
|
|
424
|
+
|
|
425
|
+
const lowerError = error.toLowerCase()
|
|
426
|
+
return retryablePatterns.some((pattern) => lowerError.includes(pattern))
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
/**
|
|
430
|
+
* Cancel any active SSE connections
|
|
431
|
+
*/
|
|
432
|
+
close(): void {
|
|
433
|
+
if (this.sseClient) {
|
|
434
|
+
this.sseClient.close()
|
|
435
|
+
this.sseClient = undefined
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
}
|
package/sdk-extensions/README.md
CHANGED
|
@@ -11,6 +11,7 @@ The RoboSystems Typescript Client Extensions provide production-ready enhancemen
|
|
|
11
11
|
|
|
12
12
|
- **Server-Sent Events (SSE)** with automatic reconnection and event replay
|
|
13
13
|
- **Smart Query Execution** with automatic queueing and progress monitoring
|
|
14
|
+
- **Data Copy Operations** with S3 import and real-time progress tracking
|
|
14
15
|
- **Operation Monitoring** for long-running tasks with real-time updates
|
|
15
16
|
- **Connection Management** with rate limiting and circuit breaker patterns
|
|
16
17
|
- **React Hooks** for seamless UI integration
|
|
@@ -91,6 +92,53 @@ const result = await queryClient.executeWithProgress(
|
|
|
91
92
|
console.log(`Query completed with ${result.rowCount} results`)
|
|
92
93
|
```
|
|
93
94
|
|
|
95
|
+
### Data Copy Operations with Progress Monitoring
|
|
96
|
+
|
|
97
|
+
```typescript
|
|
98
|
+
import { CopyClient } from '@robosystems/client/extensions'
|
|
99
|
+
|
|
100
|
+
const copyClient = new CopyClient({
|
|
101
|
+
baseUrl: 'https://api.robosystems.ai',
|
|
102
|
+
headers: { 'X-API-Key': 'your-api-key' },
|
|
103
|
+
})
|
|
104
|
+
|
|
105
|
+
// Copy data from S3 with real-time progress
|
|
106
|
+
const result = await copyClient.copyFromS3(
|
|
107
|
+
'your-graph-id',
|
|
108
|
+
{
|
|
109
|
+
table_name: 'companies',
|
|
110
|
+
source_type: 's3',
|
|
111
|
+
s3_path: 's3://my-bucket/data/companies.csv',
|
|
112
|
+
s3_access_key_id: 'AWS_ACCESS_KEY',
|
|
113
|
+
s3_secret_access_key: 'AWS_SECRET_KEY',
|
|
114
|
+
s3_region: 'us-east-1',
|
|
115
|
+
file_format: 'csv',
|
|
116
|
+
ignore_errors: false, // Stop on first error
|
|
117
|
+
},
|
|
118
|
+
{
|
|
119
|
+
onProgress: (message, percent) => {
|
|
120
|
+
console.log(`Copy progress: ${message} (${percent}%)`)
|
|
121
|
+
updateProgressBar(percent)
|
|
122
|
+
},
|
|
123
|
+
onQueueUpdate: (position, estimatedWait) => {
|
|
124
|
+
console.log(`Queue position: ${position}, ETA: ${estimatedWait}s`)
|
|
125
|
+
},
|
|
126
|
+
onWarning: (warning) => {
|
|
127
|
+
console.warn(`Copy warning: ${warning}`)
|
|
128
|
+
},
|
|
129
|
+
}
|
|
130
|
+
)
|
|
131
|
+
|
|
132
|
+
// Check results
|
|
133
|
+
if (result.status === 'completed') {
|
|
134
|
+
console.log(`Successfully imported ${result.rowsImported} rows`)
|
|
135
|
+
const stats = copyClient.calculateStatistics(result)
|
|
136
|
+
  if (stats) console.log(`Throughput: ${stats.throughput.toFixed(2)} rows/second`)
|
|
137
|
+
} else if (result.status === 'partial') {
|
|
138
|
+
console.log(`Imported ${result.rowsImported} rows, skipped ${result.rowsSkipped}`)
|
|
139
|
+
}
|
|
140
|
+
```
|
|
141
|
+
|
|
94
142
|
## 📊 SSE Event Types
|
|
95
143
|
|
|
96
144
|
The SDK supports all RoboSystems SSE event types:
|
|
@@ -181,6 +229,113 @@ try {
|
|
|
181
229
|
}
|
|
182
230
|
```
|
|
183
231
|
|
|
232
|
+
## 📤 Data Copy Operations
|
|
233
|
+
|
|
234
|
+
### CopyClient for S3 Data Import
|
|
235
|
+
|
|
236
|
+
```typescript
|
|
237
|
+
import { CopyClient, CopySourceType } from '@robosystems/client/extensions'
|
|
238
|
+
|
|
239
|
+
const copyClient = new CopyClient({
|
|
240
|
+
baseUrl: 'https://api.robosystems.ai',
|
|
241
|
+
headers: { 'X-API-Key': 'your-api-key' },
|
|
242
|
+
})
|
|
243
|
+
|
|
244
|
+
// Batch copy multiple tables
|
|
245
|
+
const results = await copyClient.batchCopyFromS3('graph-id', [
|
|
246
|
+
{
|
|
247
|
+
request: {
|
|
248
|
+
table_name: 'companies',
|
|
249
|
+
source_type: 's3',
|
|
250
|
+
s3_path: 's3://bucket/companies.csv',
|
|
251
|
+
s3_access_key_id: 'KEY',
|
|
252
|
+
s3_secret_access_key: 'SECRET',
|
|
253
|
+
file_format: 'csv',
|
|
254
|
+
},
|
|
255
|
+
},
|
|
256
|
+
{
|
|
257
|
+
request: {
|
|
258
|
+
table_name: 'transactions',
|
|
259
|
+
source_type: 's3',
|
|
260
|
+
s3_path: 's3://bucket/transactions.parquet',
|
|
261
|
+
s3_access_key_id: 'KEY',
|
|
262
|
+
s3_secret_access_key: 'SECRET',
|
|
263
|
+
file_format: 'parquet',
|
|
264
|
+
ignore_errors: true, // Continue on errors
|
|
265
|
+
},
|
|
266
|
+
},
|
|
267
|
+
])
|
|
268
|
+
|
|
269
|
+
results.forEach((result, index) => {
|
|
270
|
+
console.log(`Table ${index}: ${result.status}`)
|
|
271
|
+
if (result.rowsImported) {
|
|
272
|
+
console.log(` Imported: ${result.rowsImported} rows`)
|
|
273
|
+
}
|
|
274
|
+
})
|
|
275
|
+
```
|
|
276
|
+
|
|
277
|
+
### Copy with Retry Logic
|
|
278
|
+
|
|
279
|
+
```typescript
|
|
280
|
+
// Retry failed copy operations with exponential backoff
|
|
281
|
+
const result = await copyClient.copyWithRetry(
|
|
282
|
+
'graph-id',
|
|
283
|
+
{
|
|
284
|
+
table_name: 'large_dataset',
|
|
285
|
+
source_type: 's3',
|
|
286
|
+
s3_path: 's3://bucket/large-dataset.csv',
|
|
287
|
+
s3_access_key_id: 'KEY',
|
|
288
|
+
s3_secret_access_key: 'SECRET',
|
|
289
|
+
max_file_size_gb: 50,
|
|
290
|
+
extended_timeout: true,
|
|
291
|
+
},
|
|
292
|
+
  's3', // CopySourceType is a string-literal union ('s3' | 'url' | 'dataframe'), not an enum
|
|
293
|
+
3, // Max retries
|
|
294
|
+
{
|
|
295
|
+
onProgress: (message, percent) => {
|
|
296
|
+
console.log(`Progress: ${message} (${percent}%)`)
|
|
297
|
+
},
|
|
298
|
+
}
|
|
299
|
+
)
|
|
300
|
+
```
|
|
301
|
+
|
|
302
|
+
### Monitor Multiple Copy Operations
|
|
303
|
+
|
|
304
|
+
```typescript
|
|
305
|
+
// Start multiple copy operations
|
|
306
|
+
const operations = await Promise.all([
|
|
307
|
+
copyClient.copyFromS3('graph1', request1),
|
|
308
|
+
copyClient.copyFromS3('graph2', request2),
|
|
309
|
+
])
|
|
310
|
+
|
|
311
|
+
// Extract operation IDs for monitoring
|
|
312
|
+
const operationIds = operations
|
|
313
|
+
.filter((op) => op.status === 'accepted')
|
|
314
|
+
.map((op) => op.operationId!)
|
|
315
|
+
|
|
316
|
+
// Monitor all operations concurrently
|
|
317
|
+
const results = await copyClient.monitorMultipleCopies(operationIds, {
|
|
318
|
+
onProgress: (message) => console.log(`Progress: ${message}`),
|
|
319
|
+
})
|
|
320
|
+
```
|
|
321
|
+
|
|
322
|
+
### Copy Statistics
|
|
323
|
+
|
|
324
|
+
```typescript
|
|
325
|
+
const result = await copyClient.copyFromS3('graph-id', request)
|
|
326
|
+
|
|
327
|
+
// Calculate performance statistics
|
|
328
|
+
const stats = copyClient.calculateStatistics(result)
|
|
329
|
+
if (stats) {
|
|
330
|
+
console.log(`Total Rows: ${stats.totalRows}`)
|
|
331
|
+
console.log(`Imported: ${stats.importedRows}`)
|
|
332
|
+
console.log(`Skipped: ${stats.skippedRows}`)
|
|
333
|
+
console.log(`Throughput: ${stats.throughput.toFixed(2)} rows/sec`)
|
|
334
|
+
console.log(`Duration: ${stats.duration.toFixed(2)} seconds`)
|
|
335
|
+
console.log(`Bytes Processed: ${(stats.bytesProcessed / 1024 / 1024).toFixed(2)} MB`)
|
|
336
|
+
}
|
|
337
|
+
```
|
|
338
|
+
|
|
184
339
|
## 🎯 Operation Monitoring
|
|
185
340
|
|
|
186
341
|
### OperationClient for Long-Running Tasks
|
|
@@ -366,6 +521,68 @@ function QueryRunner() {
|
|
|
366
521
|
}
|
|
367
522
|
```
|
|
368
523
|
|
|
524
|
+
### useCopy Hook for Data Import
|
|
525
|
+
|
|
526
|
+
```typescript
|
|
527
|
+
import { useCopy } from '@robosystems/client/extensions/hooks'
|
|
528
|
+
|
|
529
|
+
function DataImporter({ graphId }: { graphId: string }) {
|
|
530
|
+
const {
|
|
531
|
+
copyFromS3,
|
|
532
|
+
loading,
|
|
533
|
+
progress,
|
|
534
|
+
error,
|
|
535
|
+
result,
|
|
536
|
+
queuePosition
|
|
537
|
+
} = useCopy(graphId)
|
|
538
|
+
|
|
539
|
+
const handleImport = async () => {
|
|
540
|
+
const importResult = await copyFromS3({
|
|
541
|
+
table_name: 'products',
|
|
542
|
+
source_type: 's3',
|
|
543
|
+
s3_path: 's3://data-bucket/products.csv',
|
|
544
|
+
s3_access_key_id: process.env.NEXT_PUBLIC_AWS_KEY!,
|
|
545
|
+
s3_secret_access_key: process.env.NEXT_PUBLIC_AWS_SECRET!,
|
|
546
|
+
file_format: 'csv',
|
|
547
|
+
})
|
|
548
|
+
|
|
549
|
+
if (importResult?.status === 'completed') {
|
|
550
|
+
alert(`Successfully imported ${importResult.rowsImported} products`)
|
|
551
|
+
}
|
|
552
|
+
}
|
|
553
|
+
|
|
554
|
+
return (
|
|
555
|
+
<div>
|
|
556
|
+
<button onClick={handleImport} disabled={loading}>
|
|
557
|
+
{loading ? 'Importing...' : 'Import Products'}
|
|
558
|
+
</button>
|
|
559
|
+
|
|
560
|
+
{progress && (
|
|
561
|
+
<div>
|
|
562
|
+
<p>{progress.message}</p>
|
|
563
|
+
{progress.percent && (
|
|
564
|
+
<progress value={progress.percent} max={100} />
|
|
565
|
+
)}
|
|
566
|
+
</div>
|
|
567
|
+
)}
|
|
568
|
+
|
|
569
|
+
{queuePosition && (
|
|
570
|
+
<p>Queue position: {queuePosition}</p>
|
|
571
|
+
)}
|
|
572
|
+
|
|
573
|
+
{error && <p style={{ color: 'red' }}>Error: {error.message}</p>}
|
|
574
|
+
|
|
575
|
+
{result && result.status === 'completed' && (
|
|
576
|
+
<div style={{ color: 'green' }}>
|
|
577
|
+
<p>✅ Successfully imported {result.rowsImported} rows</p>
|
|
578
|
+
          <p>Execution time: {((result.executionTimeMs ?? 0) / 1000).toFixed(2)}s</p>
|
|
579
|
+
</div>
|
|
580
|
+
)}
|
|
581
|
+
</div>
|
|
582
|
+
)
|
|
583
|
+
}
|
|
584
|
+
```
|
|
585
|
+
|
|
369
586
|
## 🛡️ Error Handling & Resilience
|
|
370
587
|
|
|
371
588
|
### Circuit Breaker Pattern
|
|
@@ -572,6 +789,7 @@ describe('SSE Integration', () => {
|
|
|
572
789
|
|
|
573
790
|
- **`SSEClient`** - Server-Sent Events client with auto-reconnection
|
|
574
791
|
- **`QueryClient`** - Enhanced query execution with SSE support
|
|
792
|
+
- **`CopyClient`** - Data copy operations with S3 import and progress monitoring
|
|
575
793
|
- **`OperationClient`** - Long-running operation monitoring
|
|
576
794
|
- **`StreamProcessor`** - Efficient stream processing for large datasets
|
|
577
795
|
|
|
@@ -586,6 +804,7 @@ describe('SSE Integration', () => {
|
|
|
586
804
|
|
|
587
805
|
- **`useSSE`** - Hook for SSE connection management
|
|
588
806
|
- **`useQueryWithSSE`** - Hook for queries with progress
|
|
807
|
+
- **`useCopy`** - Hook for data copy operations with progress
|
|
589
808
|
- **`useOperation`** - Hook for operation monitoring
|
|
590
809
|
|
|
591
810
|
### Utilities
|