@arela/uploader 0.2.13 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.template +66 -0
- package/README.md +263 -62
- package/docs/API_ENDPOINTS_FOR_DETECTION.md +647 -0
- package/docs/QUICK_REFERENCE_API_DETECTION.md +264 -0
- package/docs/REFACTORING_SUMMARY_DETECT_PEDIMENTOS.md +200 -0
- package/package.json +3 -2
- package/scripts/cleanup-ds-store.js +109 -0
- package/scripts/cleanup-system-files.js +69 -0
- package/scripts/tests/phase-7-features.test.js +415 -0
- package/scripts/tests/signal-handling.test.js +275 -0
- package/scripts/tests/smart-watch-integration.test.js +554 -0
- package/scripts/tests/watch-service-integration.test.js +584 -0
- package/src/commands/UploadCommand.js +31 -4
- package/src/commands/WatchCommand.js +1342 -0
- package/src/config/config.js +270 -2
- package/src/document-type-shared.js +2 -0
- package/src/document-types/support-document.js +200 -0
- package/src/file-detection.js +9 -1
- package/src/index.js +163 -4
- package/src/services/AdvancedFilterService.js +505 -0
- package/src/services/AutoProcessingService.js +749 -0
- package/src/services/BenchmarkingService.js +381 -0
- package/src/services/DatabaseService.js +1019 -539
- package/src/services/ErrorMonitor.js +275 -0
- package/src/services/LoggingService.js +419 -1
- package/src/services/MonitoringService.js +401 -0
- package/src/services/PerformanceOptimizer.js +511 -0
- package/src/services/ReportingService.js +511 -0
- package/src/services/SignalHandler.js +255 -0
- package/src/services/SmartWatchDatabaseService.js +527 -0
- package/src/services/WatchService.js +783 -0
- package/src/services/upload/ApiUploadService.js +447 -3
- package/src/services/upload/MultiApiUploadService.js +233 -0
- package/src/services/upload/SupabaseUploadService.js +12 -5
- package/src/services/upload/UploadServiceFactory.js +24 -0
- package/src/utils/CleanupManager.js +262 -0
- package/src/utils/FileOperations.js +44 -0
- package/src/utils/WatchEventHandler.js +522 -0
- package/supabase/migrations/001_create_initial_schema.sql +366 -0
- package/supabase/migrations/002_align_with_arela_api_schema.sql +145 -0
- package/.envbackup +0 -37
- package/SUPABASE_UPLOAD_FIX.md +0 -157
- package/commands.md +0 -14
package/docs/QUICK_REFERENCE_API_DETECTION.md
ADDED
@@ -0,0 +1,264 @@

# Quick Reference: Using API Service for Pedimento Detection

## Overview

The `detectPedimentosInDatabase` method now automatically uses API service when available, with seamless fallback to Supabase.

## How It Works

### Automatic Service Selection

```
1. Check if API service is available
   ├─ YES → Use API endpoints (faster, more secure)
   └─ NO → Fall back to Supabase client (backward compatible)

2. Process detection in chunks
   ├─ Fetch records (API or Supabase)
   ├─ Detect pedimentos locally
   └─ Batch update results (API or Supabase)
```

### Running Detection

```bash
# Standard command - uses API if available
node src/index.js detect --detect-pdfs

# With custom batch size
node src/index.js detect --detect-pdfs --batch-size 20
```

### Log Output

**With API (New):**
```
✅ Connected to Arela API
Using API service for PDF detection...
Phase 2: Starting PDF detection for pedimento-simplificado documents...
Processing PDF files in chunks of 100 records...
```

**With Supabase (Fallback):**
```
⚠️ API connection failed, falling back to direct Supabase upload
✅ Connected to Supabase Storage (direct mode)
Phase 2: Starting PDF detection for pedimento-simplificado documents...
Processing PDF files in chunks of 100 records...
```

## Configuration

### Environment Variables

```bash
# API Mode (Preferred)
ARELA_API_URL=https://your-api.com
ARELA_API_TOKEN=your-api-token-here

# Supabase (Fallback)
SUPABASE_URL=https://your-project.supabase.co
SUPABASE_KEY=your-supabase-key
```

### Performance Tuning

```bash
# Adjust concurrent API connections
MAX_API_CONNECTIONS=10

# Adjust API timeout
API_CONNECTION_TIMEOUT=60000

# Adjust processing batch size
BATCH_SIZE=50
```
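These variables are consumed by the CLI's configuration layer (`src/config/config.js` grew substantially in this release, though its exact shape is not shown here). A minimal sketch of how such settings are typically read in a Node CLI, using the values shown above as defaults; everything other than the variable names and defaults is an assumption:

```javascript
// config-sketch.js - illustrative only; the package's real src/config/config.js may differ.
import 'dotenv/config'; // dotenv is a declared dependency of the package

// Parse an integer environment variable, falling back to a default when unset or invalid.
const intFromEnv = (name, fallback) => {
  const parsed = Number.parseInt(process.env[name] ?? '', 10);
  return Number.isNaN(parsed) ? fallback : parsed;
};

export const config = {
  apiUrl: process.env.ARELA_API_URL,        // API mode (preferred)
  apiToken: process.env.ARELA_API_TOKEN,
  supabaseUrl: process.env.SUPABASE_URL,    // Supabase fallback
  supabaseKey: process.env.SUPABASE_KEY,
  maxApiConnections: intFromEnv('MAX_API_CONNECTIONS', 10),
  apiConnectionTimeout: intFromEnv('API_CONNECTION_TIMEOUT', 60000),
  batchSize: intFromEnv('BATCH_SIZE', 50),
};
```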
## API Endpoints Used

### 1. Fetch PDF Records
```http
GET /api/uploader/pdf-records?offset=0&limit=100&status=fs-stats&file_extension=pdf&is_like_simplificado=true
```

### 2. Batch Update Results
```http
PATCH /api/uploader/batch-update-detection
Content-Type: application/json

{
  "updates": [
    {
      "id": "uuid",
      "status": "detected",
      "document_type": "pedimento-simplificado",
      "num_pedimento": "23 12 3456 7890123",
      "arela_path": "RFC/2023/3456/",
      "year": "2023",
      "rfc": "RFC123456789"
    }
  ]
}
```
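The uploader calls these endpoints through its `ApiUploadService`, but a standalone sketch using Node 18+'s built-in `fetch` makes the request and payload shapes concrete. The `x-api-key` header mirrors the curl examples in the Testing section; everything else here is illustrative rather than the package's actual client code:

```javascript
// Illustrative sketch only - the package's ApiUploadService wraps these calls internally.
const baseUrl = process.env.ARELA_API_URL;
const headers = { 'x-api-key': process.env.ARELA_API_TOKEN };

// 1. Fetch a page of PDF records that are candidates for detection.
const query = new URLSearchParams({
  offset: '0',
  limit: '100',
  status: 'fs-stats',
  file_extension: 'pdf',
  is_like_simplificado: 'true',
});
const records = await (
  await fetch(`${baseUrl}/api/uploader/pdf-records?${query}`, { headers })
).json();
console.log('Fetched records:', records);

// 2. Push detection results back in a single batch request.
const updateResponse = await fetch(`${baseUrl}/api/uploader/batch-update-detection`, {
  method: 'PATCH',
  headers: { ...headers, 'Content-Type': 'application/json' },
  body: JSON.stringify({
    updates: [
      { id: 'uuid', status: 'detected', document_type: 'pedimento-simplificado' },
    ],
  }),
});
console.log(await updateResponse.json());
```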
## Troubleshooting

### API Not Available

**Symptom:** See "falling back to direct Supabase upload" in logs

**Solutions:**
1. Check `ARELA_API_URL` and `ARELA_API_TOKEN` are set
2. Verify backend API is running and accessible
3. Check network connectivity to API endpoint
4. Review API logs for errors

**Verification:**
```bash
# Test API health endpoint
curl -H "x-api-key: YOUR_TOKEN" https://your-api.com/api/health
```

### Slow Performance

**Symptom:** Detection taking longer than expected

**Solutions:**
1. Increase `MAX_API_CONNECTIONS` (default: 10)
2. Adjust `BATCH_SIZE` (default: 50)
3. Check backend API performance
4. Verify database indexes exist

### Update Failures

**Symptom:** Some records not being updated

**Solutions:**
1. Check API response for specific errors
2. Verify record IDs exist in database
3. Review API logs for validation errors
4. Check network stability

## Benefits Comparison

| Feature | Direct Supabase | API Service |
|---------|----------------|-------------|
| **Security** | Client has DB credentials | API key only |
| **Updates** | Individual queries | Batch operations |
| **Monitoring** | Client-side only | Centralized logging |
| **Flexibility** | Client redeployment needed | Backend updates only |
| **Performance** | Multiple connections | Optimized batching |
| **Scalability** | Limited by client | Horizontal scaling |

## Code Usage

### In Your Code

The service is used transparently - no code changes needed:

```javascript
import databaseService from './services/DatabaseService.js';

// Automatically uses API if available
const result = await databaseService.detectPedimentosInDatabase({
  batchSize: 10
});

console.log(`Detected: ${result.detectedCount}`);
console.log(`Processed: ${result.processedCount}`);
console.log(`Errors: ${result.errorCount}`);
```

### Custom Service Selection (Advanced)

```javascript
import uploadServiceFactory from './services/upload/UploadServiceFactory.js';

// Check what service is active
const isApiMode = await uploadServiceFactory.isApiModeAvailable();
console.log(`API Mode: ${isApiMode ? 'Yes' : 'No'}`);

// Force Supabase mode (bypass API)
const supabaseService = await uploadServiceFactory.getUploadService(true);
```

## Testing

### Test API Endpoints

```bash
# 1. Test fetch endpoint
curl -H "x-api-key: YOUR_TOKEN" \
  "https://your-api.com/api/uploader/pdf-records?offset=0&limit=10&status=fs-stats&file_extension=pdf&is_like_simplificado=true"

# 2. Test batch update endpoint
curl -X PATCH \
  -H "x-api-key: YOUR_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "updates": [{
      "id": "test-uuid",
      "status": "detected",
      "document_type": "pedimento-simplificado"
    }]
  }' \
  https://your-api.com/api/uploader/batch-update-detection
```

### Integration Test

```bash
# Run detection with small batch for testing
node src/index.js detect --detect-pdfs --batch-size 5

# Check logs for:
# - "Using API service for PDF detection..." (API mode)
# - Successful chunk processing
# - No error messages
```

## Monitoring

### Key Metrics to Track

1. **Service Mode**: API vs Supabase usage percentage
2. **Response Times**: Average time per batch operation
3. **Error Rates**: Failed fetches or updates
4. **Throughput**: Records processed per minute
5. **API Health**: Uptime and availability

### Log Analysis

```bash
# Count API vs Supabase usage
grep "Using API service" logs/*.log | wc -l
grep "falling back to direct Supabase" logs/*.log | wc -l

# Check for errors
grep "ERROR" logs/*.log | grep "detection"

# Performance metrics
grep "Chunk.*completed" logs/*.log
```

## Support

For issues or questions:

1. **Check Logs**: Review application logs for detailed error messages
2. **Verify Config**: Ensure all environment variables are set correctly
3. **Test Endpoints**: Use curl to test API endpoints directly
4. **Consult Docs**: See `/docs/API_ENDPOINTS_FOR_DETECTION.md` for API specs
5. **Fallback**: System automatically falls back to Supabase if API fails

## Next Steps

1. ✅ Client code is ready
2. ⏳ Implement backend API endpoints (see `/docs/API_ENDPOINTS_FOR_DETECTION.md`)
3. ⏳ Test with production data
4. ⏳ Monitor and optimize performance
5. ⏳ Phase out direct Supabase access

---

**Current Status**: Client refactored and ready. Backend endpoints need implementation.
package/docs/REFACTORING_SUMMARY_DETECT_PEDIMENTOS.md
ADDED
@@ -0,0 +1,200 @@

# Refactoring Summary: detectPedimentosInDatabase API Migration

## Overview

Successfully refactored the `detectPedimentosInDatabase` method to use API service instead of direct Supabase client access. This provides better abstraction, security, and maintainability while maintaining backward compatibility.

## Changes Made

### 1. ApiUploadService.js - New API Methods

Added two new methods to communicate with backend API endpoints (a shape sketch follows the parameter lists below):

#### `fetchPdfRecordsForDetection(options)`
- **Purpose**: Fetch PDF records ready for pedimento detection
- **Endpoint**: `GET /api/uploader/pdf-records`
- **Parameters**:
  - `offset`: Pagination offset (default: 0)
  - `limit`: Number of records to fetch (default: 100)
- **Query Filters**:
  - `status=fs-stats`
  - `file_extension=pdf`
  - `is_like_simplificado=true`
- **Returns**: `{ data: Array|null, error: Error|null }`

#### `batchUpdateDetectionResults(updates)`
- **Purpose**: Batch update detection results for multiple records
- **Endpoint**: `PATCH /api/uploader/batch-update-detection`
- **Parameters**:
  - `updates`: Array of update objects with `{ id, status, document_type, num_pedimento, arela_path, year, rfc, message }`
- **Returns**: `{ success: boolean, updated: number, errors: Array }`
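For orientation, a rough sketch of what these two methods could look like. The method names, endpoints, query filters, and return shapes come from the summary above; the class internals (`this.apiUrl`, `this.apiToken`, plain `fetch`) are assumptions for illustration, not the shipped `ApiUploadService` code:

```javascript
// Illustrative sketch - not the shipped ApiUploadService implementation.
class ApiUploadServiceSketch {
  constructor(apiUrl, apiToken) {
    this.apiUrl = apiUrl;     // assumed fields; the real service may store these differently
    this.apiToken = apiToken;
  }

  // GET /api/uploader/pdf-records → { data, error }
  async fetchPdfRecordsForDetection({ offset = 0, limit = 100 } = {}) {
    try {
      const query = new URLSearchParams({
        offset: String(offset),
        limit: String(limit),
        status: 'fs-stats',
        file_extension: 'pdf',
        is_like_simplificado: 'true',
      });
      const res = await fetch(`${this.apiUrl}/api/uploader/pdf-records?${query}`, {
        headers: { 'x-api-key': this.apiToken },
      });
      if (!res.ok) throw new Error(`HTTP ${res.status}`);
      return { data: await res.json(), error: null };
    } catch (error) {
      return { data: null, error };
    }
  }

  // PATCH /api/uploader/batch-update-detection → { success, updated, errors }
  async batchUpdateDetectionResults(updates) {
    const res = await fetch(`${this.apiUrl}/api/uploader/batch-update-detection`, {
      method: 'PATCH',
      headers: { 'x-api-key': this.apiToken, 'Content-Type': 'application/json' },
      body: JSON.stringify({ updates }),
    });
    if (!res.ok) return { success: false, updated: 0, errors: [`HTTP ${res.status}`] };
    return res.json(); // expected shape: { success, updated, errors }
  }
}
```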
### 2. DatabaseService.js - Refactored Detection Logic

Updated `detectPedimentosInDatabase` method with the following improvements:

#### Dual-Mode Operation
- **Primary**: Uses API service when available
- **Fallback**: Uses direct Supabase client if API is unavailable
- **Detection**: Automatically checks which service is available at runtime

#### Key Changes
1. **Service Detection**:
   ```javascript
   const uploadService = await uploadServiceFactory.getUploadService();
   if (uploadService.getServiceName() === 'Arela API') {
     apiService = uploadService;
     useApi = true;
   }
   ```

2. **Fetch Records** (API or Supabase):
   ```javascript
   if (useApi && apiService) {
     const { data, error } = await apiService.fetchPdfRecordsForDetection({
       offset,
       limit: queryBatchSize,
     });
   } else {
     // Fallback to Supabase
   }
   ```

3. **Batch Updates** (API or Supabase; sketched after this list):
   - Collects all updates in `batchUpdates` array
   - Sends as single batch request to API
   - Falls back to individual Supabase updates if API unavailable
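A condensed sketch of that batch-update step with its fallback path. `batchUpdates` and `batchUpdateDetectionResults` are named above; the per-record Supabase fallback against the `uploader` table is an illustration of the described behavior, not the exact shipped code:

```javascript
// Illustrative sketch of the described batch-update flow (not the shipped code).
async function applyDetectionUpdates({ useApi, apiService, supabase, batchUpdates }) {
  if (useApi && apiService) {
    // Single round trip: send every collected update in one PATCH request.
    const { success, updated, errors } = await apiService.batchUpdateDetectionResults(batchUpdates);
    return success ? { updated, errors: [] } : { updated: 0, errors };
  }
  // Fallback: one Supabase update per record, as before the refactor.
  const errors = [];
  for (const { id, ...fields } of batchUpdates) {
    const { error } = await supabase.from('uploader').update(fields).eq('id', id);
    if (error) errors.push(`${id}: ${error.message}`);
  }
  return { updated: batchUpdates.length - errors.length, errors };
}
```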
#### Benefits
- **Reduced Database Load**: Single batch update instead of multiple individual queries
- **Better Error Handling**: Centralized error management in API layer
- **Security**: No direct database credentials in client
- **Maintainability**: Easier to modify query logic in backend
- **Monitoring**: Better tracking of database operations

### 3. Documentation

Created comprehensive API documentation at `/docs/API_ENDPOINTS_FOR_DETECTION.md` including:
- Endpoint specifications
- Request/response formats
- Security considerations
- Performance recommendations
- Testing checklist
- Migration path

## Backward Compatibility

The refactored code maintains full backward compatibility:

1. **Automatic Fallback**: If API service is not available, automatically falls back to Supabase
2. **No Breaking Changes**: Same method signature and return format
3. **Logging**: Clear indication of which service is being used
4. **Error Handling**: Robust error handling for both modes

## Required Backend Work

To fully enable API mode, implement these endpoints in the backend:

### 1. GET /api/uploader/pdf-records
Query parameters: `offset`, `limit`, `status`, `file_extension`, `is_like_simplificado`

### 2. PATCH /api/uploader/batch-update-detection
Request body: `{ updates: [...] }`

See `/docs/API_ENDPOINTS_FOR_DETECTION.md` for complete specifications.
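Since these endpoints are still to be built, a minimal sketch of the two routes may help frame the work. Express and the stubbed `db` helpers are assumptions chosen purely for illustration; the real backend's framework, validation, authentication, and rate limiting will differ:

```javascript
// Backend sketch only - Express and the `db` helpers below are illustrative assumptions.
import express from 'express';

// Placeholder data layer; a real backend would query its own database here.
const db = {
  async findUploaderRecords(filters) { return []; },
  async updateUploaderRecord(id, fields) { /* no-op stub */ },
};

const app = express();
app.use(express.json());

// Simple API-key check matching the client's x-api-key header.
app.use((req, res, next) => {
  if (req.headers['x-api-key'] !== process.env.ARELA_API_TOKEN) {
    return res.status(401).json({ error: 'Unauthorized' });
  }
  next();
});

// GET /api/uploader/pdf-records - paginated fetch of detection candidates.
app.get('/api/uploader/pdf-records', async (req, res) => {
  const { offset = 0, limit = 100, status, file_extension, is_like_simplificado } = req.query;
  const rows = await db.findUploaderRecords({
    status,
    file_extension,
    is_like_simplificado: is_like_simplificado === 'true',
    offset: Number(offset),
    limit: Number(limit),
  });
  res.json(rows);
});

// PATCH /api/uploader/batch-update-detection - apply all updates in one request.
app.patch('/api/uploader/batch-update-detection', async (req, res) => {
  const { updates = [] } = req.body;
  const errors = [];
  for (const { id, ...fields } of updates) {
    try {
      await db.updateUploaderRecord(id, fields);
    } catch (err) {
      errors.push({ id, message: err.message });
    }
  }
  res.json({ success: errors.length === 0, updated: updates.length - errors.length, errors });
});

app.listen(3000);
```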
## Testing Recommendations

### Unit Tests
- [ ] Test API service methods with mock responses
- [ ] Test fallback to Supabase when API unavailable
- [ ] Test batch update logic

### Integration Tests
- [ ] Test with real API endpoints
- [ ] Test pagination with large datasets
- [ ] Test error scenarios (network failures, invalid data)
- [ ] Test performance with 1000+ records

### Manual Testing
```bash
# Test detection with API mode
node src/index.js detect --detect-pdfs

# Monitor logs to verify API usage
# Should see: "Using API service for PDF detection..."
```

## Performance Improvements

Expected improvements with API mode:
1. **Batch Updates**: ~50% reduction in database connections
2. **Network Efficiency**: Single batch request vs multiple individual requests
3. **Database Load**: Reduced query overhead
4. **Scalability**: Better support for concurrent operations

## Migration Path

### Phase 1: Current State ✅
- Client code refactored
- API methods implemented
- Documentation created
- Backward compatibility maintained

### Phase 2: Backend Implementation (Next)
- Implement GET /api/uploader/pdf-records
- Implement PATCH /api/uploader/batch-update-detection
- Add authentication/authorization
- Add rate limiting

### Phase 3: Testing & Deployment
- Test endpoints with client
- Performance benchmarking
- Gradual rollout
- Monitor logs

### Phase 4: Optimization
- Fine-tune batch sizes
- Add caching if needed
- Optimize database queries
- Consider async processing for large batches

## Code Files Modified

1. `/src/services/upload/ApiUploadService.js`
   - Added `fetchPdfRecordsForDetection()`
   - Added `batchUpdateDetectionResults()`

2. `/src/services/DatabaseService.js`
   - Refactored `detectPedimentosInDatabase()`
   - Added API service detection and fallback logic
   - Improved batch update handling

3. `/docs/API_ENDPOINTS_FOR_DETECTION.md` (new)
   - Complete API endpoint documentation

## Next Steps

1. **Backend Team**: Implement the two required API endpoints
2. **Testing**: Set up test environment with backend endpoints
3. **Configuration**: Ensure `ARELA_API_URL` and `ARELA_API_TOKEN` are configured
4. **Monitoring**: Add metrics to track API vs Supabase usage
5. **Documentation**: Update user guide with new capabilities

## Questions or Issues?

If you encounter any issues or have questions:
1. Check logs for service selection messages
2. Verify API configuration in environment variables
3. Ensure backend endpoints are deployed and accessible
4. Review error messages for specific failure points

## Benefits Summary

✅ **Abstraction**: Database logic centralized in backend
✅ **Security**: No direct database credentials in client
✅ **Maintainability**: Easier to modify without client updates
✅ **Performance**: Batch operations reduce overhead
✅ **Monitoring**: Better tracking and observability
✅ **Backward Compatible**: Seamless fallback to Supabase
✅ **Future-Ready**: Prepared for microservices architecture
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@arela/uploader",
-  "version": "0.2.13",
+  "version": "1.0.0",
   "description": "CLI to upload files/directories to Arela",
   "bin": {
     "arela": "./src/index.js"
@@ -29,6 +29,7 @@
   "homepage": "https://github.com/inspiraCode/arela-uploader#readme",
   "dependencies": {
     "@supabase/supabase-js": "2.49.4",
+    "chokidar": "^4.0.3",
     "cli-progress": "3.12.0",
     "commander": "13.1.0",
     "dotenv": "16.5.0",
@@ -43,4 +44,4 @@
     "@trivago/prettier-plugin-sort-imports": "5.2.2",
     "prettier": "3.5.3"
   }
-}
+}
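The new `chokidar` dependency backs the watch functionality added in this release (`WatchCommand.js`, `WatchService.js`, `WatchEventHandler.js`). The sketch below only illustrates the file-watching primitive chokidar provides, with an assumed watch directory; it is not the package's `WatchService` implementation:

```javascript
// Minimal chokidar usage sketch - not the package's WatchService implementation.
import { watch } from 'chokidar';

const watcher = watch('./incoming', {
  ignoreInitial: true,                            // skip events for files already present
  awaitWriteFinish: { stabilityThreshold: 2000 }, // wait until a file stops growing
});

watcher
  .on('add', (filePath) => console.log(`New file ready for upload: ${filePath}`))
  .on('unlink', (filePath) => console.log(`File removed: ${filePath}`))
  .on('error', (err) => console.error(`Watcher error: ${err.message}`));
```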
package/scripts/cleanup-ds-store.js
ADDED
@@ -0,0 +1,109 @@
```javascript
#!/usr/bin/env node

/**
 * cleanup-ds-store.js
 * Remove .DS_Store and other system file records from the uploader table
 */

import { fileURLToPath } from 'url';
import path from 'path';
import logger from './src/services/LoggingService.js';
import { databaseService } from './src/services/DatabaseService.js';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

console.log('\n╔════════════════════════════════════════════════════════════════╗');
console.log('║           Cleaning System Files from Uploader Table             ║');
console.log('╚════════════════════════════════════════════════════════════════╝\n');

async function cleanupSystemFiles() {
  try {
    const supabase = await databaseService.getSupabaseClient();

    // System file patterns to remove
    const systemFiles = [
      '.DS_Store',
      'Thumbs.db',
      'desktop.ini',
      '.directory',
    ];

    let totalDeleted = 0;

    for (const fileName of systemFiles) {
      console.log(`\n🔍 Searching for ${fileName}...`);

      // Find records with this filename
      const { data: records, error: selectError } = await supabase
        .from('uploader')
        .select('*')
        .eq('filename', fileName);

      if (selectError) {
        console.error(`❌ Error searching for ${fileName}: ${selectError.message}`);
        continue;
      }

      if (!records || records.length === 0) {
        console.log(`   ✓ No records found for ${fileName}`);
        continue;
      }

      console.log(`   Found ${records.length} record(s) for ${fileName}`);

      // Get the IDs to delete
      const idsToDelete = records.map(r => r.id);

      // Delete records
      const { error: deleteError } = await supabase
        .from('uploader')
        .delete()
        .in('id', idsToDelete);

      if (deleteError) {
        console.error(`   ❌ Error deleting: ${deleteError.message}`);
        continue;
      }

      console.log(`   ✅ Deleted ${records.length} record(s)`);
      totalDeleted += records.length;
    }

    // Also delete any records with original_path containing .DS_Store
    console.log(`\n🔍 Searching for records with .DS_Store in path...`);
    const { data: pathRecords, error: pathError } = await supabase
      .from('uploader')
      .select('*')
      .ilike('original_path', '%.DS_Store%');

    if (!pathError && pathRecords && pathRecords.length > 0) {
      console.log(`   Found ${pathRecords.length} record(s)`);
      const idsToDelete = pathRecords.map(r => r.id);

      const { error: deleteError } = await supabase
        .from('uploader')
        .delete()
        .in('id', idsToDelete);

      if (!deleteError) {
        console.log(`   ✅ Deleted ${pathRecords.length} record(s)`);
        totalDeleted += pathRecords.length;
      } else {
        console.error(`   ❌ Error deleting: ${deleteError.message}`);
      }
    }

    console.log('\n╔════════════════════════════════════════════════════════════════╗');
    console.log(`║   ✅ Cleanup Complete: Deleted ${totalDeleted} system file record(s)          ║`);
    console.log('╚════════════════════════════════════════════════════════════════╝\n');

    process.exit(0);
  } catch (error) {
    console.error('\n❌ ERROR');
    console.error(error.message);
    process.exit(1);
  }
}

cleanupSystemFiles();
```
package/scripts/cleanup-system-files.js
ADDED
@@ -0,0 +1,69 @@
```javascript
#!/usr/bin/env node

/**
 * cleanup-system-files.js
 * Remove system files (.DS_Store, etc) from the uploader table
 */

import { fileURLToPath } from 'url';
import path from 'path';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

console.log('\n╔════════════════════════════════════════════════════════════════╗');
console.log('║              Cleaning System Files from Database                ║');
console.log('╚════════════════════════════════════════════════════════════════╝\n');

try {
  const databaseService = (await import('./src/services/DatabaseService.js')).default;
  const supabase = await databaseService.getSupabaseClient();

  // System file patterns to remove
  const systemFilePatterns = ['.DS_Store', '__pycache__', '.pyc', '.swp', '.swo', 'Thumbs.db', 'desktop.ini'];

  console.log('🔍 Searching for system files...\n');

  let totalRemoved = 0;

  for (const pattern of systemFilePatterns) {
    const { data: records, error } = await supabase
      .from('uploader')
      .select('id, filename')
      .ilike('filename', `%${pattern}%`);

    if (!error && records && records.length > 0) {
      console.log(`📝 Found ${records.length} record(s) with "${pattern}"`);
      records.forEach(r => {
        console.log(`   - ID: ${r.id}, Filename: ${r.filename}`);
      });

      // Delete them
      const ids = records.map(r => r.id);
      const { error: deleteError } = await supabase
        .from('uploader')
        .delete()
        .in('id', ids);

      if (!deleteError) {
        console.log(`   ✅ Deleted ${records.length} record(s)\n`);
        totalRemoved += records.length;
      } else {
        console.log(`   ❌ Error deleting records: ${deleteError.message}\n`);
      }
    }
  }

  console.log('╔════════════════════════════════════════════════════════════════╗');
  console.log(`║                      ✅ CLEANUP COMPLETED                        ║`);
  console.log(`║                                                                  ║`);
  console.log(`║  Total system files removed: ${totalRemoved}`);
  console.log('╚════════════════════════════════════════════════════════════════╝\n');

  process.exit(0);
} catch (error) {
  console.error('\n❌ CLEANUP FAILED');
  console.error(error.message);
  console.error(error.stack);
  process.exit(1);
}
```