@robosystems/client 0.1.17 → 0.1.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/create-feature +91 -0
- package/extensions/CopyClient.d.ts +97 -0
- package/extensions/CopyClient.js +287 -0
- package/extensions/CopyClient.ts +438 -0
- package/extensions/hooks.d.ts +35 -0
- package/extensions/hooks.js +123 -0
- package/extensions/hooks.ts +139 -0
- package/extensions/index.d.ts +7 -2
- package/extensions/index.js +15 -1
- package/extensions/index.ts +23 -1
- package/package.json +7 -1
- package/sdk/sdk.gen.d.ts +36 -1
- package/sdk/sdk.gen.js +36 -1
- package/sdk/sdk.gen.ts +36 -1
- package/sdk/types.gen.d.ts +45 -4
- package/sdk/types.gen.ts +45 -4
- package/sdk-extensions/CopyClient.d.ts +97 -0
- package/sdk-extensions/CopyClient.js +287 -0
- package/sdk-extensions/CopyClient.ts +438 -0
- package/sdk-extensions/README.md +219 -0
- package/sdk-extensions/hooks.d.ts +35 -0
- package/sdk-extensions/hooks.js +123 -0
- package/sdk-extensions/hooks.ts +139 -0
- package/sdk-extensions/index.d.ts +7 -2
- package/sdk-extensions/index.js +15 -1
- package/sdk-extensions/index.ts +23 -1
- package/sdk.gen.d.ts +36 -1
- package/sdk.gen.js +36 -1
- package/sdk.gen.ts +36 -1
- package/types.gen.d.ts +45 -4
- package/types.gen.ts +45 -4
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/bin/bash
set -e

# Create feature branch script using GitHub Actions
# Creates a new feature/bugfix/hotfix branch from specified base
# Usage: ./bin/create-feature [feature|bugfix|hotfix|chore|refactor] [branch-name] [base-branch]

# Default values
BRANCH_TYPE=${1:-feature}
BRANCH_NAME=${2:-}
BASE_BRANCH=${3:-main}

# Validate branch type
case "$BRANCH_TYPE" in
  feature|bugfix|hotfix|chore|refactor) ;;
  *)
    echo "❌ Invalid branch type: $BRANCH_TYPE"
    echo "Usage: $0 [feature|bugfix|hotfix|chore|refactor] [branch-name] [base-branch]"
    exit 1
    ;;
esac

# Check if branch name is provided
if [ -z "$BRANCH_NAME" ]; then
  echo "❌ Branch name is required"
  echo "Usage: $0 [feature|bugfix|hotfix|chore|refactor] [branch-name] [base-branch]"
  exit 1
fi

# The workflow is triggered through the GitHub CLI; fail fast if it is missing.
if ! command -v gh >/dev/null 2>&1; then
  echo "❌ GitHub CLI (gh) is required but not installed"
  exit 1
fi

# Sanitize branch name: non-alphanumerics -> '-', collapse runs of '-',
# strip leading/trailing '-', lowercase.
SANITIZED_NAME=$(echo "$BRANCH_NAME" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/--*/-/g' | sed 's/^-//;s/-$//' | tr '[:upper:]' '[:lower:]')

# Guard against names that sanitize to nothing (e.g. "!!!") — otherwise we
# would try to create a branch literally named "feature/".
if [ -z "$SANITIZED_NAME" ]; then
  echo "❌ Branch name contains no usable characters after sanitization: $BRANCH_NAME"
  exit 1
fi

FULL_BRANCH_NAME="$BRANCH_TYPE/$SANITIZED_NAME"

echo "🌿 Creating feature branch..."
echo "📋 Branch Details:"
echo "  Type: $BRANCH_TYPE"
echo "  Name: $SANITIZED_NAME"
echo "  Full Name: $FULL_BRANCH_NAME"
echo "  Base Branch: $BASE_BRANCH"
echo ""

# Check for uncommitted changes (both unstaged and staged)
if ! git diff --quiet || ! git diff --cached --quiet; then
  echo "⚠️  You have uncommitted changes. Please commit or stash them first."
  echo ""
  echo "Uncommitted files:"
  git status --porcelain
  exit 1
fi

echo "🚀 Triggering GitHub Actions workflow..."
gh workflow run create-feature.yml \
  --field branch_type="$BRANCH_TYPE" \
  --field branch_name="$BRANCH_NAME" \
  --field base_branch="$BASE_BRANCH"

echo "⏳ Waiting for branch to be created..."

# Wait for the branch to be created (check every 5 seconds for up to 1 minute)
MAX_ATTEMPTS=12
ATTEMPT=1

while [ "$ATTEMPT" -le "$MAX_ATTEMPTS" ]; do
  echo "Attempt $ATTEMPT/$MAX_ATTEMPTS: Checking if branch exists..."

  # Fetch latest changes from remote
  git fetch origin --quiet

  # Check if the branch exists on remote (ref quoted defensively)
  if git show-ref --verify --quiet "refs/remotes/origin/$FULL_BRANCH_NAME"; then
    echo "✅ Branch $FULL_BRANCH_NAME found! Checking it out..."
    # Create a local tracking branch; fall back to a plain checkout if it
    # already exists locally.
    git checkout -b "$FULL_BRANCH_NAME" "origin/$FULL_BRANCH_NAME" 2>/dev/null || git checkout "$FULL_BRANCH_NAME"

    echo "🎉 Successfully created and switched to $FULL_BRANCH_NAME"
    echo ""
    echo "📝 Next steps:"
    echo "  1. Make your changes and commit them"
    echo "  2. Push your changes: git push"
    echo "  3. Create a PR when ready: npm run pr:create"

    exit 0
  fi

  if [ "$ATTEMPT" -eq "$MAX_ATTEMPTS" ]; then
    echo "❌ Timeout: Branch $FULL_BRANCH_NAME was not created after 1 minute"
    echo "Check the GitHub Actions workflow status:"
    echo "  gh run list --workflow=create-feature.yml"
    exit 1
  fi

  echo "Branch not yet available, waiting 5 seconds..."
  sleep 5
  ATTEMPT=$((ATTEMPT + 1))
done
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import type { DataFrameCopyRequest, S3CopyRequest, UrlCopyRequest } from '../types.gen';
/** Discriminator for the kind of source a copy operation reads from. */
export type CopySourceType = 's3' | 'url' | 'dataframe';
/** Optional callbacks and settings for a copy operation. */
export interface CopyOptions {
    /** Called with a human-readable status message and, when known, a percentage. */
    onProgress?: (message: string, progressPercent?: number) => void;
    /** Called when the operation's queue position changes (position, estimated wait in seconds). */
    onQueueUpdate?: (position: number, estimatedWait: number) => void;
    /** Called once per warning emitted while the operation runs. */
    onWarning?: (warning: string) => void;
    /** Maximum time in milliseconds to wait for an SSE-monitored operation. */
    timeout?: number;
    /** Reserved flag for test runs — TODO confirm server-side semantics. */
    testMode?: boolean;
}
/** Outcome of a single copy operation. */
export interface CopyResult {
    /** 'accepted' means the server queued the operation asynchronously. */
    status: 'completed' | 'failed' | 'partial' | 'accepted';
    rowsImported?: number;
    rowsSkipped?: number;
    bytesProcessed?: number;
    /** Wall-clock duration of the operation in milliseconds. */
    executionTimeMs?: number;
    warnings?: string[];
    /** Error description when status is 'failed'. */
    error?: string;
    /** Server-assigned id for async operations; used for SSE monitoring. */
    operationId?: string;
    /** SSE endpoint for monitoring an 'accepted' operation, when provided. */
    sseUrl?: string;
    message?: string;
}
/** Derived throughput figures computed from a completed CopyResult. */
export interface CopyStatistics {
    /** Imported plus skipped rows. */
    totalRows: number;
    importedRows: number;
    skippedRows: number;
    bytesProcessed: number;
    /** Duration in seconds. */
    duration: number;
    /** Imported rows per second (0 when duration is 0). */
    throughput: number;
}
/**
 * Enhanced copy client with SSE-based progress monitoring for
 * long-running data copy operations into a graph database.
 */
export declare class CopyClient {
    private sseClient?;
    private config;
    constructor(config: {
        baseUrl: string;
        credentials?: 'include' | 'same-origin' | 'omit';
        headers?: Record<string, string>;
    });
    /**
     * Copy data from S3 to graph database
     */
    copyFromS3(graphId: string, request: S3CopyRequest, options?: CopyOptions): Promise<CopyResult>;
    /**
     * Copy data from URL to graph database (when available)
     */
    copyFromUrl(graphId: string, request: UrlCopyRequest, options?: CopyOptions): Promise<CopyResult>;
    /**
     * Copy data from DataFrame to graph database (when available)
     */
    copyFromDataFrame(graphId: string, request: DataFrameCopyRequest, options?: CopyOptions): Promise<CopyResult>;
    /**
     * Execute copy operation with automatic SSE monitoring for long-running operations
     */
    private executeCopy;
    /**
     * Monitor a copy operation using SSE
     */
    private monitorCopyOperation;
    /**
     * Build copy result from response data
     */
    private buildCopyResult;
    /**
     * Calculate copy statistics from result
     */
    calculateStatistics(result: CopyResult): CopyStatistics | null;
    /**
     * Convenience method for simple S3 copy with default options
     */
    copyS3(graphId: string, tableName: string, s3Uri: string, accessKeyId: string, secretAccessKey: string, options?: {
        region?: string;
        fileFormat?: 'csv' | 'parquet' | 'json' | 'delta' | 'iceberg';
        ignoreErrors?: boolean;
    }): Promise<CopyResult>;
    /**
     * Monitor multiple copy operations concurrently
     */
    monitorMultipleCopies(operationIds: string[], options?: CopyOptions): Promise<Map<string, CopyResult>>;
    /**
     * Batch copy multiple tables from S3
     */
    batchCopyFromS3(graphId: string, copies: Array<{
        request: S3CopyRequest;
        options?: CopyOptions;
    }>): Promise<CopyResult[]>;
    /**
     * Copy with retry logic for transient failures
     */
    copyWithRetry(graphId: string, request: S3CopyRequest | UrlCopyRequest | DataFrameCopyRequest, sourceType: CopySourceType, maxRetries?: number, options?: CopyOptions): Promise<CopyResult>;
    /**
     * Check if an error is retryable
     */
    private isRetryableError;
    /**
     * Cancel any active SSE connections
     */
    close(): void;
}
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
'use client';
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CopyClient = void 0;
/**
 * Enhanced Copy Client with SSE support
 * Provides intelligent data copy operations with progress monitoring
 */
const sdk_gen_1 = require("../sdk.gen");
const SSEClient_1 = require("./SSEClient");
class CopyClient {
    /**
     * @param config Connection settings forwarded to each SSEClient
     *   (baseUrl, optional credentials mode, optional extra headers).
     */
    constructor(config) {
        this.config = config;
        // Every SSE client currently monitoring an operation. Tracked so that
        // close() can actually cancel in-flight monitors: previously close()
        // only inspected this.sseClient, which was never assigned anywhere,
        // making close() a silent no-op. A Set also supports the concurrent
        // monitors created by monitorMultipleCopies().
        this.activeSseClients = new Set();
    }
    /**
     * Copy data from S3 to graph database
     */
    async copyFromS3(graphId, request, options = {}) {
        return this.executeCopy(graphId, request, 's3', options);
    }
    /**
     * Copy data from URL to graph database (when available)
     */
    async copyFromUrl(graphId, request, options = {}) {
        return this.executeCopy(graphId, request, 'url', options);
    }
    /**
     * Copy data from DataFrame to graph database (when available)
     */
    async copyFromDataFrame(graphId, request, options = {}) {
        return this.executeCopy(graphId, request, 'dataframe', options);
    }
    /**
     * Execute copy operation with automatic SSE monitoring for long-running
     * operations.
     *
     * Returns a 'failed' CopyResult (never throws) when the request itself
     * errors; callers that need exceptions should use copyWithRetry.
     */
    async executeCopy(graphId, request, _sourceType, options = {}) {
        const startTime = Date.now();
        const data = {
            url: '/v1/{graph_id}/copy',
            path: { graph_id: graphId },
            body: request,
        };
        try {
            // Execute the copy request
            const response = await (0, sdk_gen_1.copyDataToGraph)(data);
            const responseData = response.data;
            // Check if this is an accepted (async) operation
            if (responseData.status === 'accepted' && responseData.operation_id) {
                // This is a long-running operation with SSE monitoring
                options.onProgress?.(`Copy operation started. Monitoring progress...`);
                // If SSE URL is provided, use it for monitoring
                if (responseData.sse_url) {
                    return this.monitorCopyOperation(responseData.operation_id, options, startTime);
                }
                // Otherwise return the accepted response
                return {
                    status: 'accepted',
                    operationId: responseData.operation_id,
                    sseUrl: responseData.sse_url,
                    message: responseData.message,
                };
            }
            // This is a synchronous response - operation completed immediately
            return this.buildCopyResult(responseData, Date.now() - startTime);
        }
        catch (error) {
            return {
                status: 'failed',
                error: error instanceof Error ? error.message : String(error),
                executionTimeMs: Date.now() - startTime,
            };
        }
    }
    /**
     * Monitor a copy operation using SSE.
     *
     * Resolves with a CopyResult for completion, server-side error, and
     * cancellation; rejects only on timeout or connection failure.
     */
    async monitorCopyOperation(operationId, options, startTime) {
        return new Promise((resolve, reject) => {
            const sseClient = new SSEClient_1.SSEClient(this.config);
            this.activeSseClients.add(sseClient);
            // Single teardown path: untrack then close, so close() never sees
            // a client that has already finished.
            const finish = () => {
                this.activeSseClients.delete(sseClient);
                sseClient.close();
            };
            const timeoutMs = options.timeout || 3600000; // Default 1 hour for copy operations
            const timeoutHandle = setTimeout(() => {
                finish();
                reject(new Error(`Copy operation timeout after ${timeoutMs}ms`));
            }, timeoutMs);
            sseClient
                .connect(operationId)
                .then(() => {
                // NOTE(review): listeners are attached after connect() resolves;
                // this assumes SSEClient buffers (or does not emit) events that
                // arrive before registration — confirm against SSEClient.
                let result = { status: 'failed' };
                const warnings = [];
                // Listen for queue updates
                sseClient.on(SSEClient_1.EventType.QUEUE_UPDATE, (data) => {
                    options.onQueueUpdate?.(data.position || data.queue_position, data.estimated_wait_seconds || 0);
                });
                // Listen for progress updates
                sseClient.on(SSEClient_1.EventType.OPERATION_PROGRESS, (data) => {
                    const message = data.message || data.status || 'Processing...';
                    const progressPercent = data.progress_percent || data.progress;
                    options.onProgress?.(message, progressPercent);
                    // Check for warnings in progress updates
                    if (data.warnings) {
                        warnings.push(...data.warnings);
                        data.warnings.forEach((warning) => {
                            options.onWarning?.(warning);
                        });
                    }
                });
                // Listen for completion
                sseClient.on(SSEClient_1.EventType.OPERATION_COMPLETED, (data) => {
                    clearTimeout(timeoutHandle);
                    const completionData = data.result || data;
                    result = {
                        status: completionData.status || 'completed',
                        rowsImported: completionData.rows_imported,
                        rowsSkipped: completionData.rows_skipped,
                        bytesProcessed: completionData.bytes_processed,
                        executionTimeMs: Date.now() - startTime,
                        warnings: warnings.length > 0 ? warnings : completionData.warnings,
                        message: completionData.message,
                    };
                    finish();
                    resolve(result);
                });
                // Listen for errors
                sseClient.on(SSEClient_1.EventType.OPERATION_ERROR, (error) => {
                    clearTimeout(timeoutHandle);
                    result = {
                        status: 'failed',
                        error: error.message || error.error || 'Copy operation failed',
                        executionTimeMs: Date.now() - startTime,
                        warnings: warnings.length > 0 ? warnings : undefined,
                    };
                    finish();
                    resolve(result); // Resolve with error result, not reject
                });
                // Listen for cancellation
                sseClient.on(SSEClient_1.EventType.OPERATION_CANCELLED, () => {
                    clearTimeout(timeoutHandle);
                    result = {
                        status: 'failed',
                        error: 'Copy operation cancelled',
                        executionTimeMs: Date.now() - startTime,
                        warnings: warnings.length > 0 ? warnings : undefined,
                    };
                    finish();
                    resolve(result);
                });
            })
                .catch((error) => {
                clearTimeout(timeoutHandle);
                // Previously the client was neither closed nor untracked on a
                // failed connect, leaking the connection attempt.
                finish();
                reject(error);
            });
        });
    }
    /**
     * Build copy result from response data (maps snake_case API fields to
     * the camelCase CopyResult shape).
     */
    buildCopyResult(responseData, executionTimeMs) {
        return {
            status: responseData.status,
            rowsImported: responseData.rows_imported || undefined,
            rowsSkipped: responseData.rows_skipped || undefined,
            bytesProcessed: responseData.bytes_processed || undefined,
            // Prefer the server-reported duration; fall back to client wall clock.
            executionTimeMs: responseData.execution_time_ms || executionTimeMs,
            warnings: responseData.warnings || undefined,
            message: responseData.message,
            error: responseData.error_details ? String(responseData.error_details) : undefined,
        };
    }
    /**
     * Calculate copy statistics from result.
     *
     * Returns null for failed results and for results with no imported rows.
     * NOTE(review): a successful copy of 0 rows also yields null because of
     * the falsy check on rowsImported — confirm this is intended.
     */
    calculateStatistics(result) {
        if (result.status === 'failed' || !result.rowsImported) {
            return null;
        }
        const totalRows = (result.rowsImported || 0) + (result.rowsSkipped || 0);
        const duration = (result.executionTimeMs || 0) / 1000; // Convert to seconds
        const throughput = duration > 0 ? (result.rowsImported || 0) / duration : 0;
        return {
            totalRows,
            importedRows: result.rowsImported || 0,
            skippedRows: result.rowsSkipped || 0,
            bytesProcessed: result.bytesProcessed || 0,
            duration,
            throughput,
        };
    }
    /**
     * Convenience method for simple S3 copy with default options
     */
    async copyS3(graphId, tableName, s3Uri, accessKeyId, secretAccessKey, options) {
        const request = {
            table_name: tableName,
            source_type: 's3',
            s3_path: s3Uri,
            s3_access_key_id: accessKeyId,
            s3_secret_access_key: secretAccessKey,
            s3_region: options?.region || 'us-east-1',
            file_format: options?.fileFormat,
            ignore_errors: options?.ignoreErrors || false,
        };
        return this.copyFromS3(graphId, request);
    }
    /**
     * Monitor multiple copy operations concurrently
     */
    async monitorMultipleCopies(operationIds, options = {}) {
        const results = await Promise.all(operationIds.map(async (id) => {
            const result = await this.monitorCopyOperation(id, options, Date.now());
            return [id, result];
        }));
        return new Map(results);
    }
    /**
     * Batch copy multiple tables from S3
     */
    async batchCopyFromS3(graphId, copies) {
        return Promise.all(copies.map(({ request, options }) => this.copyFromS3(graphId, request, options || {})));
    }
    /**
     * Copy with retry logic for transient failures.
     *
     * Retries only errors matched by isRetryableError, with exponential
     * backoff capped at 30s. Throws the last error if every attempt threw.
     */
    async copyWithRetry(graphId, request, sourceType, maxRetries = 3, options = {}) {
        let lastError;
        let attempt = 0;
        while (attempt < maxRetries) {
            attempt++;
            try {
                const result = await this.executeCopy(graphId, request, sourceType, options);
                // If successful or partially successful, return
                if (result.status === 'completed' || result.status === 'partial') {
                    return result;
                }
                // If failed, check if it's retryable
                if (result.status === 'failed') {
                    const isRetryable = this.isRetryableError(result.error);
                    if (!isRetryable || attempt === maxRetries) {
                        return result;
                    }
                    // Wait before retry with exponential backoff
                    const waitTime = Math.min(1000 * Math.pow(2, attempt - 1), 30000);
                    options.onProgress?.(`Retrying copy operation (attempt ${attempt}/${maxRetries}) in ${waitTime}ms...`);
                    await new Promise((resolve) => setTimeout(resolve, waitTime));
                }
            }
            catch (error) {
                lastError = error instanceof Error ? error : new Error(String(error));
                if (attempt === maxRetries) {
                    throw lastError;
                }
                // Wait before retry
                const waitTime = Math.min(1000 * Math.pow(2, attempt - 1), 30000);
                options.onProgress?.(`Retrying after error (attempt ${attempt}/${maxRetries}) in ${waitTime}ms...`);
                await new Promise((resolve) => setTimeout(resolve, waitTime));
            }
        }
        throw lastError || new Error('Copy operation failed after all retries');
    }
    /**
     * Check if an error is retryable (substring match against known
     * transient-failure patterns).
     */
    isRetryableError(error) {
        if (!error)
            return false;
        const retryablePatterns = [
            'timeout',
            'network',
            'connection',
            'temporary',
            'unavailable',
            'rate limit',
            'throttl',
        ];
        const lowerError = error.toLowerCase();
        return retryablePatterns.some((pattern) => lowerError.includes(pattern));
    }
    /**
     * Cancel any active SSE connections
     */
    close() {
        // Close every tracked monitor (fixes the original no-op: this.sseClient
        // was checked but never assigned).
        for (const client of this.activeSseClients) {
            client.close();
        }
        this.activeSseClients.clear();
        // Preserved for compatibility with the declared private field.
        if (this.sseClient) {
            this.sseClient.close();
            this.sseClient = undefined;
        }
    }
}
exports.CopyClient = CopyClient;