@arela/uploader 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/index.js +65 -10
package/package.json CHANGED
package/src/index.js CHANGED
@@ -122,16 +122,66 @@ const writeLog = (message) => {
   }
 };
 
-const getProcessedPaths = () => {
-  const lines = fs.existsSync(logFilePath)
-    ? fs.readFileSync(logFilePath, 'utf-8').split('\n')
-    : [];
+// Modified to fetch from Supabase first, then fallback to local log
+const getProcessedPaths = async () => {
   const processed = new Set();
-  for (const line of lines) {
-    const match = line.match(/(SUCCESS|SKIPPED): .*? -> (.+)/);
-    if (match) {
-      const [, , path] = match;
-      processed.add(path.trim());
+
+  // Try to fetch from Supabase first
+  try {
+    const { data, error } = await supabase
+      .from('upload_logs')
+      .select('path')
+      .in('status', ['success', 'skipped']);
+
+    if (error) {
+      console.warn(
+        `⚠️ Could not fetch processed paths from Supabase: ${error.message}. Falling back to local log.`,
+      );
+      // Fallback to local log if Supabase fetch fails
+      const lines = fs.existsSync(logFilePath)
+        ? fs.readFileSync(logFilePath, 'utf-8').split('\n')
+        : [];
+      for (const line of lines) {
+        const match = line.match(/(SUCCESS|SKIPPED): .*? -> (.+)/);
+        if (match) {
+          const [, , path] = match;
+          processed.add(path.trim());
+        }
+      }
+    } else if (data) {
+      data.forEach((log) => {
+        if (log.path) {
+          processed.add(log.path.trim());
+        }
+      });
+      // Also read from local log to ensure any paths logged before this change or during a Supabase outage are included
+      const lines = fs.existsSync(logFilePath)
+        ? fs.readFileSync(logFilePath, 'utf-8').split('\n')
+        : [];
+      for (const line of lines) {
+        const match = line.match(/(SUCCESS|SKIPPED): .*? -> (.+)/);
+        if (match) {
+          const [, , pathValue] = match;
+          if (pathValue) {
+            processed.add(pathValue.trim());
+          }
+        }
+      }
+    }
+  } catch (e) {
+    console.warn(
+      `⚠️ Error fetching from Supabase or reading local log: ${e.message}. Proceeding with an empty set of processed paths initially.`,
+    );
+    // Ensure local log is still attempted if Supabase connection itself fails
+    const lines = fs.existsSync(logFilePath)
+      ? fs.readFileSync(logFilePath, 'utf-8').split('\n')
+      : [];
+    for (const line of lines) {
+      const match = line.match(/(SUCCESS|SKIPPED): .*? -> (.+)/);
+      if (match) {
+        const [, , path] = match;
+        processed.add(path.trim());
+      }
     }
   }
   return processed;
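The hunk above replaces the purely local-log lookup with a Supabase query, keeping the log file as a fallback, and the successful branch still re-reads the local log so entries written before this change or during an outage are not lost. A minimal standalone sketch of that query, assuming a supabase-js client built from environment variables (the actual client setup sits outside the lines shown in this diff):

// Sketch only, not the package's actual wiring.
const { createClient } = require('@supabase/supabase-js');

// Assumption: credentials come from the environment; index.js may construct its client differently.
const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_KEY);

const fetchProcessedFromSupabase = async () => {
  const { data, error } = await supabase
    .from('upload_logs')                          // table name taken from the diff
    .select('path')
    .in('status', ['success', 'skipped']);        // status values taken from the diff

  if (error) {
    // The caller would then fall back to parsing the local log file, as in the hunk above.
    return null;
  }
  return new Set((data ?? []).map((row) => row.path && row.path.trim()).filter(Boolean));
};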
@@ -163,7 +213,7 @@ program
       process.exit(1);
     }
 
-    const processedPaths = getProcessedPaths();
+    const processedPaths = await getProcessedPaths();
     let globalSuccess = 0;
     let globalFailure = 0;
 
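Because getProcessedPaths is now async, the call site above must await it; without the added await, later membership checks would run against a pending Promise instead of a Set. A tiny illustration with a hypothetical stub (not package code):

(async () => {
  // Hypothetical stub standing in for the real getProcessedPaths in index.js.
  const getProcessedPaths = async () => new Set(['photos/2024/img-001.jpg']);

  const notAwaited = getProcessedPaths();               // a pending Promise, not a Set
  console.log(notAwaited instanceof Set);               // false: calling .has() here would throw

  const awaited = await getProcessedPaths();            // the resolved Set
  console.log(awaited.has('photos/2024/img-001.jpg'));  // true
})();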
@@ -241,6 +291,11 @@ program
             contentType,
             metadata: {
               originalName: path.basename(file),
+              clientPath: path.posix.join(
+                basePath,
+                folder,
+                path.relative(sourcePath, file).replace(/\\/g, '/'),
+              ),
             },
           }),
         );
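The added clientPath metadata joins the upload's base path, the destination folder, and the file's path relative to the source directory, replacing Windows backslashes so the stored value is always posix-style. A worked example with hypothetical values (basePath, folder, sourcePath, and file stand in for the identifiers used in index.js):

const path = require('path');

// Hypothetical inputs; the real values come from the uploader's surrounding upload routine.
const basePath = 'clients/acme';
const folder = 'photos';
const sourcePath = 'C:\\exports';
const file = 'C:\\exports\\2024\\05\\img-001.jpg';

// path.win32 is used here so the sketch behaves the same on any OS;
// the package calls path.relative, which resolves to this behaviour when the CLI runs on Windows.
const relative = path.win32.relative(sourcePath, file);  // '2024\\05\\img-001.jpg'

const clientPath = path.posix.join(
  basePath,
  folder,
  relative.replace(/\\/g, '/'),
);

console.log(clientPath);  // 'clients/acme/photos/2024/05/img-001.jpg'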