@uploadista/client-core 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/LICENSE +21 -0
- package/README.md +100 -0
- package/dist/auth/auth-http-client.d.ts +50 -0
- package/dist/auth/auth-http-client.d.ts.map +1 -0
- package/dist/auth/auth-http-client.js +110 -0
- package/dist/auth/direct-auth.d.ts +38 -0
- package/dist/auth/direct-auth.d.ts.map +1 -0
- package/dist/auth/direct-auth.js +95 -0
- package/dist/auth/index.d.ts +6 -0
- package/dist/auth/index.d.ts.map +1 -0
- package/dist/auth/index.js +5 -0
- package/dist/auth/no-auth.d.ts +26 -0
- package/dist/auth/no-auth.d.ts.map +1 -0
- package/dist/auth/no-auth.js +33 -0
- package/dist/auth/saas-auth.d.ts +80 -0
- package/dist/auth/saas-auth.d.ts.map +1 -0
- package/dist/auth/saas-auth.js +167 -0
- package/dist/auth/types.d.ts +101 -0
- package/dist/auth/types.d.ts.map +1 -0
- package/dist/auth/types.js +8 -0
- package/dist/chunk-buffer.d.ts +209 -0
- package/dist/chunk-buffer.d.ts.map +1 -0
- package/dist/chunk-buffer.js +236 -0
- package/dist/client/create-uploadista-client.d.ts +369 -0
- package/dist/client/create-uploadista-client.d.ts.map +1 -0
- package/dist/client/create-uploadista-client.js +518 -0
- package/dist/client/index.d.ts +4 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/index.js +3 -0
- package/dist/client/uploadista-api.d.ts +284 -0
- package/dist/client/uploadista-api.d.ts.map +1 -0
- package/dist/client/uploadista-api.js +444 -0
- package/dist/client/uploadista-websocket-manager.d.ts +110 -0
- package/dist/client/uploadista-websocket-manager.d.ts.map +1 -0
- package/dist/client/uploadista-websocket-manager.js +207 -0
- package/dist/error.d.ts +106 -0
- package/dist/error.d.ts.map +1 -0
- package/dist/error.js +69 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +12 -0
- package/dist/logger.d.ts +70 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +59 -0
- package/dist/mock-data-store.d.ts +30 -0
- package/dist/mock-data-store.d.ts.map +1 -0
- package/dist/mock-data-store.js +88 -0
- package/dist/network-monitor.d.ts +262 -0
- package/dist/network-monitor.d.ts.map +1 -0
- package/dist/network-monitor.js +291 -0
- package/dist/services/abort-controller-service.d.ts +19 -0
- package/dist/services/abort-controller-service.d.ts.map +1 -0
- package/dist/services/abort-controller-service.js +4 -0
- package/dist/services/checksum-service.d.ts +4 -0
- package/dist/services/checksum-service.d.ts.map +1 -0
- package/dist/services/checksum-service.js +1 -0
- package/dist/services/file-reader-service.d.ts +38 -0
- package/dist/services/file-reader-service.d.ts.map +1 -0
- package/dist/services/file-reader-service.js +4 -0
- package/dist/services/fingerprint-service.d.ts +4 -0
- package/dist/services/fingerprint-service.d.ts.map +1 -0
- package/dist/services/fingerprint-service.js +1 -0
- package/dist/services/http-client.d.ts +182 -0
- package/dist/services/http-client.d.ts.map +1 -0
- package/dist/services/http-client.js +1 -0
- package/dist/services/id-generation-service.d.ts +10 -0
- package/dist/services/id-generation-service.d.ts.map +1 -0
- package/dist/services/id-generation-service.js +1 -0
- package/dist/services/index.d.ts +11 -0
- package/dist/services/index.d.ts.map +1 -0
- package/dist/services/index.js +10 -0
- package/dist/services/platform-service.d.ts +48 -0
- package/dist/services/platform-service.d.ts.map +1 -0
- package/dist/services/platform-service.js +10 -0
- package/dist/services/service-container.d.ts +25 -0
- package/dist/services/service-container.d.ts.map +1 -0
- package/dist/services/service-container.js +1 -0
- package/dist/services/storage-service.d.ts +26 -0
- package/dist/services/storage-service.d.ts.map +1 -0
- package/dist/services/storage-service.js +1 -0
- package/dist/services/websocket-service.d.ts +36 -0
- package/dist/services/websocket-service.d.ts.map +1 -0
- package/dist/services/websocket-service.js +4 -0
- package/dist/smart-chunker.d.ts +72 -0
- package/dist/smart-chunker.d.ts.map +1 -0
- package/dist/smart-chunker.js +317 -0
- package/dist/storage/client-storage.d.ts +148 -0
- package/dist/storage/client-storage.d.ts.map +1 -0
- package/dist/storage/client-storage.js +62 -0
- package/dist/storage/in-memory-storage-service.d.ts +7 -0
- package/dist/storage/in-memory-storage-service.d.ts.map +1 -0
- package/dist/storage/in-memory-storage-service.js +24 -0
- package/dist/storage/index.d.ts +3 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/storage/index.js +2 -0
- package/dist/types/buffered-chunk.d.ts +6 -0
- package/dist/types/buffered-chunk.d.ts.map +1 -0
- package/dist/types/buffered-chunk.js +1 -0
- package/dist/types/chunk-metrics.d.ts +12 -0
- package/dist/types/chunk-metrics.d.ts.map +1 -0
- package/dist/types/chunk-metrics.js +1 -0
- package/dist/types/flow-result.d.ts +11 -0
- package/dist/types/flow-result.d.ts.map +1 -0
- package/dist/types/flow-result.js +1 -0
- package/dist/types/flow-upload-config.d.ts +54 -0
- package/dist/types/flow-upload-config.d.ts.map +1 -0
- package/dist/types/flow-upload-config.js +1 -0
- package/dist/types/flow-upload-item.d.ts +16 -0
- package/dist/types/flow-upload-item.d.ts.map +1 -0
- package/dist/types/flow-upload-item.js +1 -0
- package/dist/types/flow-upload-options.d.ts +41 -0
- package/dist/types/flow-upload-options.d.ts.map +1 -0
- package/dist/types/flow-upload-options.js +1 -0
- package/dist/types/index.d.ts +14 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +13 -0
- package/dist/types/multi-flow-upload-options.d.ts +33 -0
- package/dist/types/multi-flow-upload-options.d.ts.map +1 -0
- package/dist/types/multi-flow-upload-options.js +1 -0
- package/dist/types/multi-flow-upload-state.d.ts +9 -0
- package/dist/types/multi-flow-upload-state.d.ts.map +1 -0
- package/dist/types/multi-flow-upload-state.js +1 -0
- package/dist/types/performance-insights.d.ts +11 -0
- package/dist/types/performance-insights.d.ts.map +1 -0
- package/dist/types/performance-insights.js +1 -0
- package/dist/types/previous-upload.d.ts +20 -0
- package/dist/types/previous-upload.d.ts.map +1 -0
- package/dist/types/previous-upload.js +9 -0
- package/dist/types/upload-options.d.ts +40 -0
- package/dist/types/upload-options.d.ts.map +1 -0
- package/dist/types/upload-options.js +1 -0
- package/dist/types/upload-response.d.ts +6 -0
- package/dist/types/upload-response.d.ts.map +1 -0
- package/dist/types/upload-response.js +1 -0
- package/dist/types/upload-result.d.ts +57 -0
- package/dist/types/upload-result.d.ts.map +1 -0
- package/dist/types/upload-result.js +1 -0
- package/dist/types/upload-session-metrics.d.ts +16 -0
- package/dist/types/upload-session-metrics.d.ts.map +1 -0
- package/dist/types/upload-session-metrics.js +1 -0
- package/dist/upload/chunk-upload.d.ts +40 -0
- package/dist/upload/chunk-upload.d.ts.map +1 -0
- package/dist/upload/chunk-upload.js +82 -0
- package/dist/upload/flow-upload.d.ts +48 -0
- package/dist/upload/flow-upload.d.ts.map +1 -0
- package/dist/upload/flow-upload.js +240 -0
- package/dist/upload/index.d.ts +3 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +2 -0
- package/dist/upload/parallel-upload.d.ts +65 -0
- package/dist/upload/parallel-upload.d.ts.map +1 -0
- package/dist/upload/parallel-upload.js +231 -0
- package/dist/upload/single-upload.d.ts +118 -0
- package/dist/upload/single-upload.d.ts.map +1 -0
- package/dist/upload/single-upload.js +332 -0
- package/dist/upload/upload-manager.d.ts +30 -0
- package/dist/upload/upload-manager.d.ts.map +1 -0
- package/dist/upload/upload-manager.js +57 -0
- package/dist/upload/upload-metrics.d.ts +37 -0
- package/dist/upload/upload-metrics.d.ts.map +1 -0
- package/dist/upload/upload-metrics.js +236 -0
- package/dist/upload/upload-storage.d.ts +32 -0
- package/dist/upload/upload-storage.d.ts.map +1 -0
- package/dist/upload/upload-storage.js +46 -0
- package/dist/upload/upload-strategy.d.ts +66 -0
- package/dist/upload/upload-strategy.d.ts.map +1 -0
- package/dist/upload/upload-strategy.js +171 -0
- package/dist/upload/upload-utils.d.ts +26 -0
- package/dist/upload/upload-utils.d.ts.map +1 -0
- package/dist/upload/upload-utils.js +80 -0
- package/package.json +29 -0
- package/src/__tests__/smart-chunking.test.ts +399 -0
- package/src/auth/__tests__/auth-http-client.test.ts +327 -0
- package/src/auth/__tests__/direct-auth.test.ts +135 -0
- package/src/auth/__tests__/no-auth.test.ts +40 -0
- package/src/auth/__tests__/saas-auth.test.ts +337 -0
- package/src/auth/auth-http-client.ts +150 -0
- package/src/auth/direct-auth.ts +121 -0
- package/src/auth/index.ts +5 -0
- package/src/auth/no-auth.ts +39 -0
- package/src/auth/saas-auth.ts +218 -0
- package/src/auth/types.ts +105 -0
- package/src/chunk-buffer.ts +287 -0
- package/src/client/create-uploadista-client.ts +901 -0
- package/src/client/index.ts +3 -0
- package/src/client/uploadista-api.ts +857 -0
- package/src/client/uploadista-websocket-manager.ts +275 -0
- package/src/error.ts +149 -0
- package/src/index.ts +13 -0
- package/src/logger.ts +104 -0
- package/src/mock-data-store.ts +97 -0
- package/src/network-monitor.ts +445 -0
- package/src/services/abort-controller-service.ts +21 -0
- package/src/services/checksum-service.ts +3 -0
- package/src/services/file-reader-service.ts +44 -0
- package/src/services/fingerprint-service.ts +6 -0
- package/src/services/http-client.ts +229 -0
- package/src/services/id-generation-service.ts +9 -0
- package/src/services/index.ts +10 -0
- package/src/services/platform-service.ts +65 -0
- package/src/services/service-container.ts +24 -0
- package/src/services/storage-service.ts +29 -0
- package/src/services/websocket-service.ts +33 -0
- package/src/smart-chunker.ts +451 -0
- package/src/storage/client-storage.ts +186 -0
- package/src/storage/in-memory-storage-service.ts +33 -0
- package/src/storage/index.ts +2 -0
- package/src/types/buffered-chunk.ts +5 -0
- package/src/types/chunk-metrics.ts +11 -0
- package/src/types/flow-result.ts +14 -0
- package/src/types/flow-upload-config.ts +56 -0
- package/src/types/flow-upload-item.ts +16 -0
- package/src/types/flow-upload-options.ts +56 -0
- package/src/types/index.ts +13 -0
- package/src/types/multi-flow-upload-options.ts +39 -0
- package/src/types/multi-flow-upload-state.ts +9 -0
- package/src/types/performance-insights.ts +7 -0
- package/src/types/previous-upload.ts +22 -0
- package/src/types/upload-options.ts +56 -0
- package/src/types/upload-response.ts +6 -0
- package/src/types/upload-result.ts +60 -0
- package/src/types/upload-session-metrics.ts +15 -0
- package/src/upload/chunk-upload.ts +151 -0
- package/src/upload/flow-upload.ts +367 -0
- package/src/upload/index.ts +2 -0
- package/src/upload/parallel-upload.ts +387 -0
- package/src/upload/single-upload.ts +554 -0
- package/src/upload/upload-manager.ts +106 -0
- package/src/upload/upload-metrics.ts +340 -0
- package/src/upload/upload-storage.ts +87 -0
- package/src/upload/upload-strategy.ts +296 -0
- package/src/upload/upload-utils.ts +114 -0
- package/tsconfig.json +23 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
/**
 * Tracks upload performance: per-chunk measurements, per-session aggregates,
 * and derived insights (overall efficiency, network stability, and chunk-size
 * recommendations).
 *
 * Fixes over the previous revision: guards against divide-by-zero when the
 * average speed (or a size-group mean) is 0, which previously produced NaN
 * in `networkStability` / `chunkingEffectiveness`.
 */
export class UploadMetrics {
  /**
   * @param {object} [config]
   * @param {number} [config.maxChunkHistory=1000] - max retained chunk records
   * @param {boolean} [config.enableDetailedMetrics=true]
   * @param {object} [config.performanceThresholds] - overrides for slowSpeed /
   *   fastSpeed / highRetryRate (bytes-per-second and ratio values)
   */
  constructor(config = {}) {
    this.chunkHistory = [];
    this.currentSession = {};
    this.sessionStartTime = 0;
    this.config = {
      maxChunkHistory: config.maxChunkHistory ?? 1000,
      enableDetailedMetrics: config.enableDetailedMetrics ?? true,
      performanceThresholds: {
        slowSpeed: 100 * 1024, // 100 KB/s
        fastSpeed: 5 * 1024 * 1024, // 5 MB/s
        highRetryRate: 0.2, // 20%
        ...config.performanceThresholds,
      },
    };
  }

  /**
   * Begin a new metrics session for one upload. Clears the chunk history.
   * @param {string} uploadId
   * @param {number} totalSize - total upload size in bytes
   * @param {boolean} adaptiveChunkingEnabled
   */
  startSession(uploadId, totalSize, adaptiveChunkingEnabled) {
    this.sessionStartTime = Date.now();
    this.currentSession = {
      uploadId,
      totalSize,
      chunksCompleted: 0,
      chunksTotal: Math.ceil(totalSize / (1024 * 1024)), // rough estimate
      totalDuration: 0,
      totalRetries: 0,
      adaptiveChunkingEnabled,
      startTime: this.sessionStartTime,
    };
    this.chunkHistory = [];
  }

  /**
   * Record one chunk result. Expects { size, duration, speed, retryCount,
   * success }; a timestamp is added automatically. Session aggregates are
   * only updated for successful chunks.
   */
  recordChunk(metrics) {
    const chunkMetrics = {
      ...metrics,
      timestamp: Date.now(),
    };
    this.chunkHistory.push(chunkMetrics);
    // Keep history within limits
    if (this.chunkHistory.length > this.config.maxChunkHistory) {
      this.chunkHistory = this.chunkHistory.slice(-this.config.maxChunkHistory);
    }
    // Update session metrics
    if (this.currentSession && chunkMetrics.success) {
      this.currentSession.chunksCompleted =
        (this.currentSession.chunksCompleted || 0) + 1;
      this.currentSession.totalDuration =
        (this.currentSession.totalDuration || 0) + chunkMetrics.duration;
      this.currentSession.totalRetries =
        (this.currentSession.totalRetries || 0) + chunkMetrics.retryCount;
    }
  }

  /**
   * Finalize the current session and return its aggregate metrics.
   * Returns null when no session is active or no chunk succeeded.
   */
  endSession() {
    if (!this.currentSession.uploadId) {
      return null;
    }
    const endTime = Date.now();
    const totalDuration = endTime - this.sessionStartTime;
    const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);
    if (successfulChunks.length === 0) {
      return null;
    }
    const speeds = successfulChunks.map((chunk) => chunk.speed);
    const averageSpeed = speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;
    const peakSpeed = Math.max(...speeds);
    const minSpeed = Math.min(...speeds);
    const successRate = successfulChunks.length / this.chunkHistory.length;
    const sessionMetrics = {
      uploadId: this.currentSession.uploadId || "",
      totalSize: this.currentSession.totalSize || 0,
      totalDuration,
      chunksCompleted: successfulChunks.length,
      chunksTotal: this.chunkHistory.length,
      averageSpeed,
      peakSpeed,
      minSpeed,
      totalRetries: this.currentSession.totalRetries || 0,
      successRate,
      adaptiveChunkingEnabled: this.currentSession.adaptiveChunkingEnabled || false,
      startTime: this.currentSession.startTime || 0,
      endTime,
    };
    // Reset current session
    this.currentSession = {};
    return sessionMetrics;
  }

  /** Snapshot (shallow copy) of the in-progress session metrics. */
  getCurrentSessionMetrics() {
    return { ...this.currentSession };
  }

  /**
   * Copy of the chunk history, optionally limited to the most recent `count`.
   */
  getChunkHistory(count) {
    const history = this.chunkHistory.slice();
    return count ? history.slice(-count) : history;
  }

  /**
   * Derive performance insights from the recorded chunks. Requires at least
   * 5 chunks; otherwise returns a neutral "insufficient data" result.
   */
  getPerformanceInsights() {
    if (this.chunkHistory.length < 5) {
      return {
        overallEfficiency: 0,
        chunkingEffectiveness: 0,
        networkStability: 0,
        recommendations: ["Insufficient data for analysis"],
        optimalChunkSizeRange: { min: 256 * 1024, max: 2 * 1024 * 1024 },
      };
    }
    const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);
    const speeds = successfulChunks.map((chunk) => chunk.speed);
    // Calculate metrics
    const averageSpeed = speeds.length > 0
      ? speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length
      : 0;
    const speedVariance = this.calculateVariance(speeds);
    const speedStdDev = Math.sqrt(speedVariance);
    // BUG FIX: a zero average speed previously made this NaN (0/0), and the
    // NaN propagated into networkStability below. Treat it as zero variation.
    const coefficientOfVariation = averageSpeed > 0 ? speedStdDev / averageSpeed : 0;
    // Overall efficiency based on speed and retry rate
    const successRate = successfulChunks.length / this.chunkHistory.length;
    const speedScore = Math.min(1, averageSpeed / this.config.performanceThresholds.fastSpeed);
    const overallEfficiency = speedScore * 0.7 + successRate * 0.3;
    // Network stability (lower coefficient of variation = higher stability)
    const networkStability = Math.max(0, 1 - Math.min(1, coefficientOfVariation));
    // Chunking effectiveness based on how well chunk sizes correlate with performance
    const chunkingEffectiveness = this.calculateChunkingEffectiveness(successfulChunks);
    // Generate recommendations
    const recommendations = this.generateRecommendations(averageSpeed, successRate, coefficientOfVariation);
    // Calculate optimal chunk size range
    const optimalChunkSizeRange = this.calculateOptimalChunkSizeRange(successfulChunks);
    return {
      overallEfficiency,
      chunkingEffectiveness,
      networkStability,
      recommendations,
      optimalChunkSizeRange,
    };
  }

  /** Bundle session snapshot, full chunk history, and insights for export. */
  exportMetrics() {
    return {
      session: this.getCurrentSessionMetrics(),
      chunks: this.getChunkHistory(),
      insights: this.getPerformanceInsights(),
    };
  }

  /** Discard all recorded state (history, session, start time). */
  reset() {
    this.chunkHistory = [];
    this.currentSession = {};
    this.sessionStartTime = 0;
  }

  /** Population variance of `values`; 0 for an empty array. */
  calculateVariance(values) {
    if (values.length === 0) {
      return 0;
    }
    const mean = values.reduce((sum, value) => sum + value, 0) / values.length;
    const squaredDifferences = values.map((value) => (value - mean) ** 2);
    return squaredDifferences.reduce((sum, diff) => sum + diff, 0) / values.length;
  }

  /**
   * Score (0..1) of how consistently chunks performed across size groups.
   * Returns the neutral 0.5 when there is too little data to judge.
   */
  calculateChunkingEffectiveness(chunks) {
    if (chunks.length < 3) {
      return 0.5;
    }
    // Look for correlation between chunk size and upload speed
    // Better chunking should show consistent performance across different sizes
    const sizeGroups = this.groupChunksBySize(chunks);
    if (Object.keys(sizeGroups).length < 2) {
      return 0.5;
    }
    // Calculate coefficient of variation for each size group
    const groupVariations = Object.values(sizeGroups).map((group) => {
      const speeds = group.map((chunk) => chunk.speed);
      const mean = speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;
      const variance = this.calculateVariance(speeds);
      // BUG FIX: guard against a zero mean, which previously produced NaN.
      return mean > 0 ? Math.sqrt(variance) / mean : 0;
    });
    // Lower average variation indicates better chunking effectiveness
    const averageVariation = groupVariations.reduce((sum, cv) => sum + cv, 0) / groupVariations.length;
    return Math.max(0, 1 - Math.min(1, averageVariation));
  }

  /**
   * Bucket chunks into named size ranges
   * (64KB, 128KB, 256KB, 512KB, 1MB, 2MB, 4MB, 8MB+).
   */
  groupChunksBySize(chunks) {
    const groups = {};
    chunks.forEach((chunk) => {
      let sizeGroup;
      if (chunk.size < 128 * 1024) sizeGroup = "64KB";
      else if (chunk.size < 256 * 1024) sizeGroup = "128KB";
      else if (chunk.size < 512 * 1024) sizeGroup = "256KB";
      else if (chunk.size < 1024 * 1024) sizeGroup = "512KB";
      else if (chunk.size < 2 * 1024 * 1024) sizeGroup = "1MB";
      else if (chunk.size < 4 * 1024 * 1024) sizeGroup = "2MB";
      else if (chunk.size < 8 * 1024 * 1024) sizeGroup = "4MB";
      else sizeGroup = "8MB+";
      if (!groups[sizeGroup]) {
        groups[sizeGroup] = [];
      }
      groups[sizeGroup].push(chunk);
    });
    return groups;
  }

  /**
   * Human-readable tuning suggestions derived from speed, success rate,
   * and speed variability.
   */
  generateRecommendations(averageSpeed, successRate, coefficientOfVariation) {
    const recommendations = [];
    if (averageSpeed < this.config.performanceThresholds.slowSpeed) {
      recommendations.push("Consider using smaller chunk sizes for better performance on slow connections");
    }
    if (averageSpeed > this.config.performanceThresholds.fastSpeed) {
      recommendations.push("Network is fast - larger chunk sizes may improve efficiency");
    }
    if (successRate < 0.9) {
      recommendations.push("High failure rate detected - consider more conservative chunking strategy");
    }
    if (coefficientOfVariation > 0.5) {
      recommendations.push("Network appears unstable - smaller, more frequent chunks may be more reliable");
    }
    if (coefficientOfVariation < 0.2 &&
      averageSpeed > this.config.performanceThresholds.slowSpeed) {
      recommendations.push("Stable network detected - larger chunks may improve efficiency");
    }
    if (recommendations.length === 0) {
      recommendations.push("Performance appears optimal with current configuration");
    }
    return recommendations;
  }

  /**
   * Size range spanned by the fastest 30% of chunks, clamped to
   * [64KB, 32MB]. Falls back to a 256KB–2MB default with < 5 samples.
   */
  calculateOptimalChunkSizeRange(chunks) {
    if (chunks.length < 5) {
      return { min: 256 * 1024, max: 2 * 1024 * 1024 };
    }
    // Find chunks with best performance (top 30% by speed)
    const sortedBySpeed = chunks.slice().sort((a, b) => b.speed - a.speed);
    const topPerformers = sortedBySpeed.slice(0, Math.ceil(chunks.length * 0.3));
    const topSizes = topPerformers.map((chunk) => chunk.size);
    const minOptimal = Math.min(...topSizes);
    const maxOptimal = Math.max(...topSizes);
    return {
      min: Math.max(64 * 1024, minOptimal), // At least 64KB
      max: Math.min(32 * 1024 * 1024, maxOptimal), // At most 32MB
    };
  }
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import type { IdGenerationService } from "../services/id-generation-service";
import type { ClientStorage } from "../storage/client-storage";
import type { PreviousUpload } from "../types/previous-upload";
/**
 * Find previous uploads by fingerprint.
 *
 * Queries client-side storage for uploads saved under `fingerprint` so an
 * interrupted upload can be offered for resumption.
 */
export declare function findPreviousUploads(clientStorage: ClientStorage, fingerprint: string): Promise<PreviousUpload[]>;
/**
 * Resume from a previous upload.
 *
 * Extracts the identifiers needed to continue a stored upload; `uploadId`
 * is null when the stored record has none.
 */
export declare function resumeFromPreviousUpload(previousUpload: PreviousUpload): {
    uploadId: string | null;
    parallelUploadUrls: string[] | undefined;
    clientStorageKey: string | null;
};
/**
 * Add the upload URL to the URL storage, if possible.
 *
 * Resolves to the new storage key, or `undefined` when nothing was stored
 * (resuming disabled, no fingerprint, or a key already exists).
 */
export declare function saveUploadInClientStorage({ clientStorage, fingerprint, size, metadata, clientStorageKey, storeFingerprintForResuming, generateId, }: {
    clientStorage: ClientStorage;
    fingerprint: string;
    size: number;
    metadata: Record<string, string | number | boolean>;
    clientStorageKey: string | null;
    storeFingerprintForResuming: boolean;
    generateId: IdGenerationService;
}): Promise<string | undefined>;
/**
 * Remove the entry in the URL storage, if it has been saved before.
 * No-op when `clientStorageKey` is empty.
 */
export declare function removeFromClientStorage(clientStorage: ClientStorage, clientStorageKey: string): Promise<void>;
//# sourceMappingURL=upload-storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload-storage.d.ts","sourceRoot":"","sources":["../../src/upload/upload-storage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAC7E,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC/D,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAE/D;;GAEG;AACH,wBAAsB,mBAAmB,CACvC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,MAAM,GAClB,OAAO,CAAC,cAAc,EAAE,CAAC,CAE3B;AAED;;GAEG;AACH,wBAAgB,wBAAwB,CAAC,cAAc,EAAE,cAAc,GAAG;IACxE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,kBAAkB,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;IACzC,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;CACjC,CAMA;AAED;;GAEG;AACH,wBAAsB,yBAAyB,CAAC,EAC9C,aAAa,EACb,WAAW,EACX,IAAI,EACJ,QAAQ,EACR,gBAAgB,EAChB,2BAA2B,EAC3B,UAAU,GACX,EAAE;IACD,aAAa,EAAE,aAAa,CAAC;IAC7B,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC,CAAC;IACpD,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;IAChC,2BAA2B,EAAE,OAAO,CAAC;IACrC,UAAU,EAAE,mBAAmB,CAAC;CACjC,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CA2B9B;AAED;;GAEG;AACH,wBAAsB,uBAAuB,CAC3C,aAAa,EAAE,aAAa,EAC5B,gBAAgB,EAAE,MAAM,GACvB,OAAO,CAAC,IAAI,CAAC,CAGf"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
 * Look up uploads previously persisted under the given fingerprint.
 * Delegates entirely to the storage backend.
 */
export async function findPreviousUploads(clientStorage, fingerprint) {
  const previousUploads = await clientStorage.findUploadsByFingerprint(fingerprint);
  return previousUploads;
}
|
|
7
|
+
/**
 * Build resumption parameters from a stored upload record.
 * A missing upload id is normalized to null; the other fields pass through.
 */
export function resumeFromPreviousUpload(previousUpload) {
  const { uploadId, parallelUploadUrls, clientStorageKey } = previousUpload;
  return {
    uploadId: uploadId == null ? null : uploadId,
    parallelUploadUrls,
    clientStorageKey,
  };
}
|
|
17
|
+
/**
 * Add the upload URL to the URL storage, if possible.
 *
 * Returns the new client-storage key, or undefined when nothing is stored:
 * when resuming was disabled in the options, when no fingerprint could be
 * calculated for the input (i.e. a stream), or when a key is already stored.
 */
export async function saveUploadInClientStorage({ clientStorage, fingerprint, size, metadata, clientStorageKey, storeFingerprintForResuming, generateId, }) {
  const shouldStore =
    storeFingerprintForResuming && fingerprint && clientStorageKey == null;
  if (!shouldStore) {
    return undefined;
  }
  return clientStorage.addUpload(
    fingerprint,
    {
      size,
      metadata,
      creationTime: new Date().toString(),
      clientStorageKey: fingerprint,
    },
    { generateId },
  );
}
|
|
39
|
+
/**
 * Remove the entry in the URL storage, if it has been saved before.
 * Does nothing when no key is given.
 */
export async function removeFromClientStorage(clientStorage, clientStorageKey) {
  if (clientStorageKey) {
    await clientStorage.removeUpload(clientStorageKey);
  }
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import type { DataStoreCapabilities } from "@uploadista/core/types";
import { type NegotiatedStrategy, UploadStrategyNegotiator } from "@uploadista/core/upload";
import type { Logger } from "../logger";
import { MockClientDataStore } from "../mock-data-store";
import type { HttpClient } from "../services/http-client";
/**
 * Configuration for how the upload strategy (single vs. parallel) is chosen.
 */
export type UploadStrategyConfig = {
    preferredStrategy?: "single" | "parallel" | "auto";
    minFileSizeForParallel?: number;
    enableCapabilityNegotiation?: boolean;
    onStrategySelected?: (strategy: {
        chosen: "single" | "parallel";
        chunkSize: number;
        parallelUploads: number;
        reasoning: string[];
        warnings: string[];
    }) => void;
};
/**
 * Client options used when validating configuration and negotiating uploads.
 */
export type UploadClientOptions = {
    baseUrl: string;
    uploadBasePath?: string;
    storageId: string;
    retryDelays?: number[];
    chunkSize: number;
    parallelUploads?: number;
    parallelChunkSize?: number;
    uploadStrategy?: UploadStrategyConfig;
};
export declare function createUploadStrategyNegotiator(dataStore: MockClientDataStore): UploadStrategyNegotiator;
/**
 * Fetch capabilities from server.
 *
 * Falls back to default client capabilities when the request fails.
 */
export declare function fetchServerCapabilities(baseUrl: string, uploadBasePath: string, storageId: string, httpClient: HttpClient): Promise<DataStoreCapabilities>;
/**
 * Negotiate upload strategy based on capabilities and options.
 */
export declare function negotiateUploadStrategy({ capabilities, fileSize, chunkSize, parallelUploads, uploadLengthDeferred, strategyConfig, logger, }: {
    capabilities: DataStoreCapabilities;
    fileSize: number | null;
    chunkSize: number;
    parallelUploads: number;
    uploadLengthDeferred?: boolean;
    strategyConfig?: UploadStrategyConfig;
    logger: Logger;
}): NegotiatedStrategy;
/**
 * Validate upload client configuration against data store capabilities.
 */
export declare function validateConfiguration(options: UploadClientOptions, capabilities: DataStoreCapabilities | undefined, logger: Logger): {
    valid: boolean;
    errors: string[];
    warnings: string[];
};
/**
 * Async configuration validation with server capabilities.
 */
export declare function validateConfigurationAsync(options: UploadClientOptions, httpClient: HttpClient, logger: Logger): Promise<{
    valid: boolean;
    errors: string[];
    warnings: string[];
    capabilities: DataStoreCapabilities;
}>;
/**
 * Validate options and throw if invalid.
 */
export declare function validateAndThrow(options: UploadClientOptions, logger: Logger): void;
//# sourceMappingURL=upload-strategy.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload-strategy.d.ts","sourceRoot":"","sources":["../../src/upload/upload-strategy.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,wBAAwB,CAAC;AACpE,OAAO,EACL,KAAK,kBAAkB,EACvB,wBAAwB,EAEzB,MAAM,yBAAyB,CAAC;AAEjC,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AACxC,OAAO,EAEL,mBAAmB,EACpB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAE1D,MAAM,MAAM,oBAAoB,GAAG;IACjC,iBAAiB,CAAC,EAAE,QAAQ,GAAG,UAAU,GAAG,MAAM,CAAC;IACnD,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAChC,2BAA2B,CAAC,EAAE,OAAO,CAAC;IACtC,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE;QAC9B,MAAM,EAAE,QAAQ,GAAG,UAAU,CAAC;QAC9B,SAAS,EAAE,MAAM,CAAC;QAClB,eAAe,EAAE,MAAM,CAAC;QACxB,SAAS,EAAE,MAAM,EAAE,CAAC;QACpB,QAAQ,EAAE,MAAM,EAAE,CAAC;KACpB,KAAK,IAAI,CAAC;CACZ,CAAC;AAEF,MAAM,MAAM,mBAAmB,GAAG;IAChC,OAAO,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,cAAc,CAAC,EAAE,oBAAoB,CAAC;CACvC,CAAC;AAEF,wBAAgB,8BAA8B,CAC5C,SAAS,EAAE,mBAAmB,GAC7B,wBAAwB,CAI1B;AAED;;GAEG;AACH,wBAAsB,uBAAuB,CAC3C,OAAO,EAAE,MAAM,EACf,cAAc,EAAE,MAAM,EACtB,SAAS,EAAE,MAAM,EACjB,UAAU,EAAE,UAAU,GACrB,OAAO,CAAC,qBAAqB,CAAC,CAuBhC;AAED;;GAEG;AACH,wBAAgB,uBAAuB,CAAC,EACtC,YAAY,EACZ,QAAQ,EACR,SAAS,EACT,eAAe,EACf,oBAAoB,EACpB,cAAc,EACd,MAAM,GACP,EAAE;IACD,YAAY,EAAE,qBAAqB,CAAC;IACpC,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,EAAE,MAAM,CAAC;IACxB,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,cAAc,CAAC,EAAE,oBAAoB,CAAC;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,kBAAkB,CAyDrB;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CACnC,OAAO,EAAE,mBAAmB,EAC5B,YAAY,EAAE,qBAAqB,YAA4B,EAC/D,MAAM,EAAE,MAAM,GACb;IACD,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,QAAQ,EAAE,MAAM,EAAE,CAAC;CACpB,CA4DA;AAED;;GAEG;AACH,wBAAsB,0BAA0B,CAC9C,OAAO,EAAE,mBAAmB,EAC5B,UAAU,EAAE,UAAU,EACtB,MAAM,EAAE,MAAM,GACb,OAAO,CAAC;IACT,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,Y
AAY,EAAE,qBAAqB,CAAC;CACrC,CAAC,CA8BD;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,EAAE,MAAM,GACb,IAAI,CAeN"}
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
import { UploadStrategyNegotiator, } from "@uploadista/core/upload";
|
|
2
|
+
import { UploadistaError } from "../error";
|
|
3
|
+
import { defaultClientCapabilities, MockClientDataStore, } from "../mock-data-store";
|
|
4
|
+
/**
 * Build an UploadStrategyNegotiator backed by the given data store.
 *
 * The negotiator is seeded with the store's capabilities and delegates
 * strategy validation back to the store itself.
 */
export function createUploadStrategyNegotiator(dataStore) {
  const capabilities = dataStore.getCapabilities();
  const validateStrategy = (strategy) =>
    dataStore.validateUploadStrategy(strategy);
  return new UploadStrategyNegotiator(capabilities, validateStrategy);
}
|
|
7
|
+
/**
 * Fetch capabilities from server.
 *
 * Performs a GET against the capabilities endpoint for the given storage id.
 * Any failure — network error, non-2xx status, or bad JSON — falls back to
 * `defaultClientCapabilities` instead of propagating.
 */
export async function fetchServerCapabilities(baseUrl, uploadBasePath, storageId, httpClient) {
  const query = `storageId=${encodeURIComponent(storageId)}`;
  const url = `${baseUrl}/${uploadBasePath}/capabilities?${query}`;
  try {
    const response = await httpClient.request(url, {
      method: "GET",
      headers: { "Content-Type": "application/json" },
    });
    if (!response.ok) {
      throw new Error(
        `Failed to fetch capabilities: ${response.status} ${response.statusText}`,
      );
    }
    const body = await response.json();
    return body.capabilities;
  } catch (_error) {
    // Best-effort: fall back to default capabilities if server fetch fails.
    return defaultClientCapabilities;
  }
}
|
|
30
|
+
/**
 * Negotiate upload strategy based on capabilities and options.
 *
 * When capability negotiation is enabled (the default), delegates to an
 * UploadStrategyNegotiator seeded with the supplied capabilities, logs the
 * decision trail, and notifies `strategyConfig.onStrategySelected` if set.
 * When explicitly disabled, applies the legacy size-threshold heuristic.
 */
export function negotiateUploadStrategy({ capabilities, fileSize, chunkSize, parallelUploads, uploadLengthDeferred, strategyConfig, logger, }) {
  const minParallelSize =
    strategyConfig?.minFileSizeForParallel || 10 * 1024 * 1024;

  if (strategyConfig?.enableCapabilityNegotiation === false) {
    // Legacy heuristic: go parallel only for large files of known size.
    const useParallel =
      parallelUploads > 1 &&
      fileSize &&
      fileSize > minParallelSize &&
      !uploadLengthDeferred;
    return {
      strategy: useParallel ? "parallel" : "single",
      chunkSize,
      parallelUploads: useParallel ? parallelUploads : 1,
      reasoning: [
        `Legacy strategy selection: ${useParallel ? "parallel" : "single"}`,
      ],
      warnings: [],
    };
  }

  // Capability negotiation using the (server-fetched) capabilities.
  const negotiator = createUploadStrategyNegotiator(
    new MockClientDataStore(capabilities),
  );
  const preferred = strategyConfig?.preferredStrategy;
  const negotiated = negotiator.negotiateStrategy({
    fileSize: fileSize || 0,
    preferredStrategy: preferred === "auto" ? undefined : preferred,
    preferredChunkSize: chunkSize,
    parallelUploads,
    // NOTE(review): a *file*-size threshold is passed as the min *chunk*
    // size — assumed intentional; confirm against the negotiator's API.
    minChunkSizeForParallel: minParallelSize,
  });

  logger.log(`Upload strategy negotiated: ${negotiated.strategy}`);
  for (const reason of negotiated.reasoning) {
    logger.log(` - ${reason}`);
  }
  for (const warning of negotiated.warnings) {
    logger.log(` Warning: ${warning}`);
  }

  // Notify client of strategy selection if callback provided.
  strategyConfig?.onStrategySelected?.({
    chosen: negotiated.strategy,
    chunkSize: negotiated.chunkSize,
    parallelUploads: negotiated.parallelUploads,
    reasoning: negotiated.reasoning,
    warnings: negotiated.warnings,
  });
  return negotiated;
}
|
|
83
|
+
/**
 * Validate upload client configuration against data store capabilities.
 *
 * Combines the negotiator's capability validation with client-side sanity
 * checks, logs every error and warning through `logger`, and returns the
 * aggregated result.
 *
 * @returns {{ valid: boolean, errors: string[], warnings: string[] }}
 */
export function validateConfiguration(options, capabilities = defaultClientCapabilities, logger) {
  const errors = [];
  const warnings = [];

  // Capability-level validation via the negotiator.
  const negotiator = createUploadStrategyNegotiator(
    new MockClientDataStore(capabilities),
  );
  const preferred = options.uploadStrategy?.preferredStrategy;
  const result = negotiator.validateConfiguration({
    fileSize: 0, // placeholder — no concrete file exists at configuration time
    preferredStrategy: preferred === "auto" ? undefined : preferred,
    preferredChunkSize: options.chunkSize,
    parallelUploads: options.parallelUploads,
  });
  if (!result.valid) {
    errors.push(...result.errors);
  }

  // Client-specific sanity checks.
  if (options.parallelUploads && options.parallelUploads < 1) {
    errors.push("parallelUploads must be at least 1");
  }
  if (options.chunkSize && options.chunkSize < 1024) {
    warnings.push("Chunk size below 1KB may impact performance");
  }
  if (preferred === "parallel" && !options.parallelUploads) {
    warnings.push("Parallel strategy requested but parallelUploads not configured");
  }

  // Surface validation results through the logger.
  if (errors.length > 0) {
    logger.log("Configuration validation errors:");
    for (const error of errors) {
      logger.log(` Error: ${error}`);
    }
  }
  if (warnings.length > 0) {
    logger.log("Configuration validation warnings:");
    for (const warning of warnings) {
      logger.log(` Warning: ${warning}`);
    }
  }

  return {
    valid: errors.length === 0,
    errors,
    warnings,
  };
}
|
|
133
|
+
/**
 * Async configuration validation with server capabilities.
 *
 * Attempts to fetch live capabilities from the server; on failure falls back
 * to `defaultClientCapabilities` and records a warning. The synchronous
 * validation is then run against whichever capabilities were obtained.
 *
 * @returns {Promise<{ valid: boolean, errors: string[], warnings: string[], capabilities: * }>}
 */
export async function validateConfigurationAsync(options, httpClient, logger) {
  const warnings = [];
  let capabilities;
  try {
    capabilities = await fetchServerCapabilities(
      options.baseUrl,
      options.uploadBasePath || "api/upload",
      options.storageId,
      httpClient,
    );
  } catch (error) {
    // fetchServerCapabilities already falls back internally; this guards any
    // unexpected failure and degrades to the defaults with a warning.
    logger.log(`Failed to fetch server capabilities for validation: ${error}`);
    capabilities = defaultClientCapabilities;
    warnings.push("Using default capabilities for validation - server unavailable");
  }
  const validation = validateConfiguration(options, capabilities, logger);
  const errors = [...validation.errors];
  warnings.push(...validation.warnings);
  return {
    valid: errors.length === 0,
    errors,
    warnings,
    capabilities,
  };
}
|
|
158
|
+
/**
 * Validate options and throw if invalid.
 *
 * Runs synchronous validation against the default capabilities and throws an
 * UploadistaError listing every validation error when the result is invalid.
 *
 * @throws {UploadistaError} when configuration validation fails
 */
export function validateAndThrow(options, logger) {
  const result = validateConfiguration(options, defaultClientCapabilities, logger);
  if (result.valid) {
    return;
  }
  const errorMessage = `Upload client configuration validation failed: ${result.errors.join(", ")}`;
  logger.log(errorMessage);
  // NOTE(review): reuses UPLOAD_SIZE_NOT_SPECIFIED for configuration errors —
  // a dedicated error name may be clearer; kept for caller compatibility.
  throw new UploadistaError({
    name: "UPLOAD_SIZE_NOT_SPECIFIED", // Reusing existing error type
    message: errorMessage,
  });
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
 * Encodes metadata for upload headers.
 *
 * Each entry becomes `key base64(value)` and entries are joined with commas.
 */
export declare function encodeMetadata(metadata: Record<string, string | null>): string;
/**
 * Checks whether a given status is in the range of the expected category.
 * For example, only a status between 200 and 299 will satisfy the category 200.
 */
export declare function inStatusCategory(status: number, category: 100 | 200 | 300 | 400 | 500): boolean;
/** Options controlling how the final upload size is derived. */
export type CalculateFileSizeOptions = {
    uploadLengthDeferred?: boolean;
    uploadSize?: number;
};
/**
 * Calculate the final file size for upload based on options.
 *
 * Returns null when the length is deferred; otherwise an explicit
 * `uploadSize` takes precedence over the source's own size.
 */
export declare function calculateFileSize(originalSize: number | null, { uploadLengthDeferred, uploadSize }: CalculateFileSizeOptions): number | null;
/**
 * Calculate segments for parallel upload.
 *
 * Returns half-open byte ranges [startByte, endByte) covering the file,
 * either fixed-size (when `parallelChunkSize` is given) or divided equally
 * across `parallelUploads` segments.
 */
export declare function calculateSegments(fileSize: number, parallelUploads: number, parallelChunkSize?: number): {
    startByte: number;
    endByte: number;
    segmentIndex: number;
}[];
//# sourceMappingURL=upload-utils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload-utils.d.ts","sourceRoot":"","sources":["../../src/upload/upload-utils.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,wBAAgB,cAAc,CAC5B,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,GACtC,MAAM,CAIR;AAED;;;GAGG;AACH,wBAAgB,gBAAgB,CAC9B,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GACpC,OAAO,CAET;AAED,MAAM,MAAM,wBAAwB,GAAG;IACrC,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,YAAY,EAAE,MAAM,GAAG,IAAI,EAC3B,EAAE,oBAAoB,EAAE,UAAU,EAAE,EAAE,wBAAwB,GAC7D,MAAM,GAAG,IAAI,CAsBf;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,QAAQ,EAAE,MAAM,EAChB,eAAe,EAAE,MAAM,EACvB,iBAAiB,CAAC,EAAE,MAAM,GACzB;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAC;IAAC,YAAY,EAAE,MAAM,CAAA;CAAE,EAAE,CA8ChE"}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { Base64 } from "js-base64";
|
|
2
|
+
import { UploadistaError } from "../error";
|
|
3
|
+
/**
 * Encodes metadata for upload headers.
 *
 * Produces a comma-separated list of `key base64(value)` pairs. A null value
 * is stringified to "null" before being Base64-encoded.
 */
export function encodeMetadata(metadata) {
  const pairs = [];
  for (const [key, value] of Object.entries(metadata)) {
    pairs.push(`${key} ${Base64.encode(String(value))}`);
  }
  return pairs.join(",");
}
|
|
11
|
+
/**
 * Checks whether a given status is in the range of the expected category.
 * For example, only a status between 200 and 299 will satisfy the category 200.
 */
export function inStatusCategory(status, category) {
  const upperBound = category + 100;
  return status >= category && status < upperBound;
}
|
|
18
|
+
/**
 * Calculate the final file size for upload based on options.
 *
 * Precedence: a deferred length yields null, then an explicit `uploadSize`
 * wins (including 0), and finally the source's own size is used.
 *
 * @throws {UploadistaError} when no size can be determined
 */
export function calculateFileSize(originalSize, { uploadLengthDeferred, uploadSize }) {
  if (uploadLengthDeferred) {
    return null;
  }
  if (uploadSize != null) {
    return uploadSize;
  }
  if (originalSize == null) {
    throw new UploadistaError({
      name: "UPLOAD_SIZE_NOT_SPECIFIED",
      message: "cannot automatically derive upload's size from input. Specify it manually using the `uploadSize` option or use the `uploadLengthDeferred` option",
    });
  }
  return originalSize;
}
|
|
40
|
+
/**
 * Calculate segments for parallel upload.
 *
 * Returns half-open byte ranges [startByte, endByte) covering the file.
 * With `parallelUploads <= 1` the whole file is one segment. When
 * `parallelChunkSize` is given, segments are fixed-size; otherwise the file
 * is divided equally into up to `parallelUploads` parts.
 */
export function calculateSegments(fileSize, parallelUploads, parallelChunkSize) {
  if (parallelUploads <= 1) {
    return [{ startByte: 0, endByte: fileSize, segmentIndex: 0 }];
  }
  const segments = [];
  if (parallelChunkSize) {
    // Fixed-size approach: walk the file in parallelChunkSize steps.
    for (let start = 0, index = 0; start < fileSize; index++) {
      const end = Math.min(start + parallelChunkSize, fileSize);
      segments.push({ startByte: start, endByte: end, segmentIndex: index });
      start = end;
    }
  } else {
    // Equal division: ceil so the final segment absorbs the remainder.
    const size = Math.ceil(fileSize / parallelUploads);
    for (let i = 0; i < parallelUploads; i++) {
      const startByte = i * size;
      if (startByte >= fileSize) {
        break; // further segments would start past the end of the file
      }
      segments.push({
        startByte,
        endByte: Math.min(startByte + size, fileSize),
        segmentIndex: i,
      });
    }
  }
  return segments;
}
|