@dboio/cli 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1161 -0
- package/bin/dbo.js +51 -0
- package/package.json +22 -0
- package/src/commands/add.js +374 -0
- package/src/commands/cache.js +49 -0
- package/src/commands/clone.js +742 -0
- package/src/commands/content.js +143 -0
- package/src/commands/deploy.js +89 -0
- package/src/commands/init.js +105 -0
- package/src/commands/input.js +111 -0
- package/src/commands/install.js +186 -0
- package/src/commands/instance.js +44 -0
- package/src/commands/login.js +97 -0
- package/src/commands/logout.js +22 -0
- package/src/commands/media.js +46 -0
- package/src/commands/message.js +28 -0
- package/src/commands/output.js +129 -0
- package/src/commands/pull.js +109 -0
- package/src/commands/push.js +309 -0
- package/src/commands/status.js +41 -0
- package/src/commands/update.js +168 -0
- package/src/commands/upload.js +37 -0
- package/src/lib/client.js +161 -0
- package/src/lib/columns.js +30 -0
- package/src/lib/config.js +269 -0
- package/src/lib/cookie-jar.js +104 -0
- package/src/lib/formatter.js +310 -0
- package/src/lib/input-parser.js +212 -0
- package/src/lib/logger.js +12 -0
- package/src/lib/save-to-disk.js +383 -0
- package/src/lib/structure.js +129 -0
- package/src/lib/timestamps.js +67 -0
- package/src/plugins/claudecommands/dbo.md +248 -0
|
@@ -0,0 +1,383 @@
|
|
|
1
|
+
import { writeFile, mkdir, access, readFile } from 'fs/promises';
|
|
2
|
+
import { join, dirname, basename, extname } from 'path';
|
|
3
|
+
import { log } from './logger.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Resolve a column value into the string that gets written to disk.
 *
 * The server wraps some column values in a base64 envelope of the shape
 * { bytes: N, value: "base64string", encoding: "base64" }; those are
 * decoded back to UTF-8 text. Everything else is stringified as-is.
 */
function resolveContentValue(value) {
  const isBase64Envelope =
    value !== null &&
    typeof value === 'object' &&
    !Array.isArray(value) &&
    value.encoding === 'base64' &&
    typeof value.value === 'string';

  if (isBase64Envelope) {
    return Buffer.from(value.value, 'base64').toString('utf8');
  }

  return String(value);
}
|
|
17
|
+
|
|
18
|
+
/**
 * Save-to-disk engine with @filename round-trip support.
 *
 * For every row: writes one file per selected content column, plus a
 * `<name>.metadata.json` sidecar containing the full record with content
 * columns replaced by `@filename` references (so `push` can re-inline them).
 *
 * When multiple content columns are selected, each gets its own file with
 * a column-based suffix: main-Content.css, main-Description.md
 *
 * Metadata files store @filename references for push compatibility.
 * If a metadata.json already exists, its @filename mappings are reused.
 *
 * When neither saveFilename nor nonInteractive is set, the user is walked
 * through an inquirer wizard to pick filename/path/content columns and a
 * per-column extension.
 *
 * Options:
 *   entity: entity name stored as _entity in metadata
 *   saveFilename: column for the base filename
 *   savePath: column containing the directory path
 *   saveContent: column name(s) to save as files
 *   saveExtension: column name for extension, or literal extension
 *   nonInteractive: skip all prompts
 *   contentFileMap: object mapping column → { suffix, extension } per column
 *
 * @param {Array<Object>} rows    - records to save; no-op (with warning) if empty
 * @param {Array<string>} columns - column names available on the rows
 * @param {Object} [options]      - see list above
 */
export async function saveToDisk(rows, columns, options = {}) {
  if (!rows || rows.length === 0) {
    log.warn('No records to save.');
    return;
  }

  let filenameCol, pathCol, contentCols, extensionSource, contentFileMap, customPath = null;

  // Non-interactive / pre-configured mode: take everything from options
  // (with sensible column-name fallbacks for the base filename).
  if (options.saveFilename || options.nonInteractive) {
    filenameCol = options.saveFilename || findDefault(columns, ['UID', 'Name', 'name', 'uid']);
    pathCol = options.savePath || null;
    contentCols = options.saveContent ? (Array.isArray(options.saveContent) ? options.saveContent : [options.saveContent]) : [];
    extensionSource = options.saveExtension || null;
    contentFileMap = options.contentFileMap || null;
  } else {
    // Interactive wizard — inquirer is imported lazily so non-interactive
    // callers never pay for it.
    const inquirer = (await import('inquirer')).default;

    // 1. Filename column
    const defaultFilename = findDefault(columns, ['Name', 'name', 'UID', 'uid']);
    const { filename } = await inquirer.prompt([{
      type: 'list', name: 'filename',
      message: 'Which column should be used as the base filename?',
      choices: columns, default: defaultFilename,
    }]);
    filenameCol = filename;

    // 2. Path column or custom path
    const pathChoices = [
      '(None — save to current directory)',
      '(Custom path — type a directory)',
      ...columns,
    ];
    const defaultPath = columns.includes('Path') ? 'Path' : columns.includes('path') ? 'path' : pathChoices[0];
    const { pathChoice } = await inquirer.prompt([{
      type: 'list', name: 'pathChoice',
      message: 'Where should files be saved?',
      choices: pathChoices, default: defaultPath,
    }]);

    if (pathChoice.startsWith('(Custom')) {
      const { dir } = await inquirer.prompt([{
        type: 'input', name: 'dir',
        message: 'Directory path (created if it does not exist):',
        default: '.',
      }]);
      customPath = dir;
      pathCol = null;
    } else if (pathChoice.startsWith('(None')) {
      pathCol = null;
    } else {
      pathCol = pathChoice;
    }

    // 3. Content columns — pick one at a time, configure extension immediately
    const contentDefaults = findContentDefaults(columns);
    const extCol = findColumn(columns, ['Extension', 'extension', 'ext']);
    let useExtCol = false;

    // Ask once about Extension column if it exists
    if (extCol) {
      const { useIt } = await inquirer.prompt([{
        type: 'confirm', name: 'useIt',
        message: `Use the "${extCol}" column value as file extension for content files?`,
        default: true,
      }]);
      useExtCol = useIt;
    }

    contentCols = [];
    contentFileMap = {};
    let availableColumns = [...columns];

    // Loop: pick column → configure extension → ask for another
    let keepAdding = true;
    while (keepAdding) {
      const choices = ['(Done — no more files)', ...availableColumns];
      // First pass defaults to the best content-like column; later passes
      // default to "(Done)" so Enter exits the loop.
      const defaultChoice = contentCols.length === 0
        ? (contentDefaults[0] || choices[0])
        : choices[0];

      const { col } = await inquirer.prompt([{
        type: 'list', name: 'col',
        message: contentCols.length === 0
          ? 'Save a column as file content?'
          : 'Save another column as a file?',
        choices,
        default: defaultChoice,
      }]);

      if (col.startsWith('(Done')) {
        keepAdding = false;
        break;
      }

      // Configure this column's extension immediately
      if (useExtCol) {
        contentFileMap[col] = { suffix: contentCols.length > 0 ? col.toLowerCase() : '', extensionSource: extCol };
      } else {
        const ext = await promptExtension(inquirer, col);
        contentFileMap[col] = { suffix: contentCols.length > 0 ? col.toLowerCase() : '', extension: ext };
      }

      contentCols.push(col);
      availableColumns = availableColumns.filter(c => c !== col);

      // If only 1 column so far, no suffix needed
      // If we add a second, retroactively add suffix to the first
      if (contentCols.length === 2) {
        const firstCol = contentCols[0];
        if (!contentFileMap[firstCol].suffix) {
          contentFileMap[firstCol].suffix = firstCol.toLowerCase();
        }
      }

      if (availableColumns.length === 0) break;
    }

    // Single column: clear suffix (not needed)
    if (contentCols.length === 1) {
      contentFileMap[contentCols[0]].suffix = '';
    }

    extensionSource = useExtCol ? extCol : null;
  }

  let savedCount = 0;
  const usedNames = new Map(); // track dir+name to avoid collisions

  for (const row of rows) {
    const name = sanitizeFilename(String(row[filenameCol] ?? 'untitled'));
    let dir = customPath || '.';
    let finalName = name;

    // A path column may hold either a directory ("assets/css") or a full
    // file path ("assets/css/main.css") — an extension marks the latter.
    if (pathCol && row[pathCol]) {
      const pathValue = String(row[pathCol]);
      const pathHasFile = extname(pathValue) !== '';

      if (pathHasFile) {
        dir = dirname(pathValue);
        const pathFilename = basename(pathValue, extname(pathValue));
        if (options.nonInteractive) {
          finalName = pathFilename;
        } else {
          const inquirer = (await import('inquirer')).default;
          const { usePathName } = await inquirer.prompt([{
            type: 'confirm', name: 'usePathName',
            message: `Path "${pathValue}" contains filename "${basename(pathValue)}" — use this as the filename?`,
            default: true,
          }]);
          if (usePathName) finalName = pathFilename;
        }
      } else {
        dir = pathValue;
      }
    }

    // Strip leading/trailing slashes so the write stays relative to cwd.
    dir = dir.replace(/^\/+|\/+$/g, '');
    if (!dir) dir = '.';
    if (dir !== '.') await mkdir(dir, { recursive: true });

    // Deduplicate filenames when multiple records share the same name
    const nameKey = `${dir}/${finalName}`;
    const count = usedNames.get(nameKey) || 0;
    usedNames.set(nameKey, count + 1);
    if (count > 0) {
      finalName = `${finalName}-${count + 1}`;
    }

    // Check for existing metadata with @filename mappings to reuse
    const metaPath = join(dir, `${finalName}.metadata.json`);
    let existingMeta = null;
    if (await fileExists(metaPath)) {
      try {
        existingMeta = JSON.parse(await readFile(metaPath, 'utf8'));
      } catch { /* ignore parse errors */ }
    }

    // Build metadata with @filename placeholders
    const meta = { ...row };
    if (options.entity) meta._entity = options.entity;
    const contentColumnsList = [];

    for (const col of contentCols) {
      const content = row[col];
      if (content === null || content === undefined || content === '') {
        log.warn(`Column "${col}" is empty for "${finalName}" — no file created`);
        continue;
      }

      // Determine filename for this column's content
      let fileName;

      // First check: reuse existing @filename from previous pull
      if (existingMeta && existingMeta[col] && String(existingMeta[col]).startsWith('@')) {
        fileName = String(existingMeta[col]).substring(1);
      } else {
        // Build filename from config
        fileName = buildContentFileName(finalName, col, row, contentCols, contentFileMap, extensionSource, columns);
      }

      const filePath = join(dir, fileName);

      // Prompt before overwriting
      if (await fileExists(filePath) && !options.nonInteractive) {
        const inquirer = (await import('inquirer')).default;
        const { overwrite } = await inquirer.prompt([{
          type: 'confirm', name: 'overwrite',
          message: `Overwrite existing file "${filePath}"?`,
          default: true,
        }]);
        if (!overwrite) {
          // Skipped on disk, but the metadata still references the file so
          // a later push round-trips correctly.
          log.dim(` → Skipped ${filePath}`);
          meta[col] = `@${fileName}`;
          contentColumnsList.push(col);
          continue;
        }
      }

      await writeFile(filePath, resolveContentValue(content));
      log.success(`Saved ${filePath}`);
      savedCount++;

      meta[col] = `@${fileName}`;
      contentColumnsList.push(col);
    }

    if (contentColumnsList.length > 0) {
      meta._contentColumns = contentColumnsList;
    }

    // Save metadata — prompt if exists
    if (await fileExists(metaPath) && !options.nonInteractive) {
      const inquirer = (await import('inquirer')).default;
      const { overwrite } = await inquirer.prompt([{
        type: 'confirm', name: 'overwrite',
        message: `Overwrite existing metadata "${metaPath}"?`,
        default: true,
      }]);
      if (!overwrite) {
        log.dim(` → Skipped ${metaPath}`);
        continue;
      }
    }

    await writeFile(metaPath, JSON.stringify(meta, null, 2) + '\n');
    log.dim(` → ${metaPath}`);
    savedCount++;
  }

  // savedCount includes both content files and metadata sidecars.
  log.info(`Saved ${savedCount} file(s) for ${rows.length} record(s)`);
}
|
|
287
|
+
|
|
288
|
+
/**
 * Compute the on-disk filename for one content column of a record.
 *
 * Resolution order for the extension:
 *   1. contentFileMap[col].extensionSource (a column on the row), if present
 *   2. contentFileMap[col].extension (a literal), if present
 *   3. the shared extensionSource column, when no per-column config exists
 *   4. 'txt' fallback
 *
 * Single content column, no suffix:  main.css
 * Multiple columns or explicit suffix:  main-suffix.css
 */
function buildContentFileName(baseName, col, row, contentCols, contentFileMap, extensionSource, columns) {
  const cfg = contentFileMap ? contentFileMap[col] : undefined;
  let ext = 'txt';
  let suffix = '';

  if (cfg) {
    suffix = cfg.suffix || '';
    if (cfg.extensionSource && columns.includes(cfg.extensionSource)) {
      ext = String(row[cfg.extensionSource] || 'txt').toLowerCase();
    } else if (cfg.extension) {
      ext = cfg.extension;
    }
  } else if (extensionSource && columns.includes(extensionSource)) {
    ext = String(row[extensionSource] || 'txt').toLowerCase();
  }

  // With no explicit suffix, fall back to the column name — but only when
  // several columns are being written (otherwise the base name is unique).
  const effectiveSuffix = suffix || (contentCols.length > 1 ? col.toLowerCase() : '');
  return effectiveSuffix
    ? `${baseName}-${effectiveSuffix}.${ext}`
    : `${baseName}.${ext}`;
}
|
|
314
|
+
|
|
315
|
+
// Extensions offered in the interactive picker; '(custom)' opens a free-text
// prompt for anything not listed.
const EXTENSION_CHOICES = [
  'html', 'css', 'js', 'mjs', 'json', 'md', 'txt',
  'sql', 'xml', 'sh', 'curl', 'csv', 'yaml',
  '(custom)',
];

/**
 * Ask the user which file extension to use for a content column.
 * The guess derived from the column name is listed (and pre-selected) first.
 * Returns the extension without a leading dot.
 */
async function promptExtension(inquirer, columnName) {
  const guessed = guessExtension(columnName);

  // Promote the guessed extension to the top of the list.
  const orderedChoices = [guessed];
  for (const choice of EXTENSION_CHOICES) {
    if (choice !== guessed) orderedChoices.push(choice);
  }

  const answer = await inquirer.prompt([{
    type: 'list', name: 'ext',
    message: `File extension for "${columnName}":`,
    choices: orderedChoices,
    default: guessed,
  }]);

  if (answer.ext !== '(custom)') {
    return answer.ext;
  }

  const { custom } = await inquirer.prompt([{
    type: 'input', name: 'custom',
    message: 'Enter custom extension:',
    default: 'txt',
  }]);
  return custom.replace(/^\./, '');
}
|
|
344
|
+
|
|
345
|
+
/**
 * Guess a file extension from a column name (e.g. "CustomCSS" → "css").
 * Falls back to 'txt' when nothing matches.
 *
 * Note: the 'json' check must run BEFORE the 'js' check — "json" contains
 * "js" as a substring, so the previous ordering made the 'json' branch
 * unreachable and mapped JSON-ish columns to 'js'.
 */
function guessExtension(columnName) {
  const lower = columnName.toLowerCase();
  if (lower.includes('css')) return 'css';
  if (lower.includes('json')) return 'json';
  if (lower.includes('js') || lower.includes('script')) return 'js';
  if (lower.includes('html')) return 'html';
  if (lower.includes('sql')) return 'sql';
  if (lower.includes('xml')) return 'xml';
  if (lower.includes('md') || lower.includes('markdown')) return 'md';
  return 'txt';
}
|
|
356
|
+
|
|
357
|
+
/** True if the path is accessible on disk, false otherwise (never throws). */
async function fileExists(targetPath) {
  try {
    await access(targetPath);
    return true;
  } catch {
    return false;
  }
}
|
|
360
|
+
|
|
361
|
+
/**
 * Return the first candidate name present in columns; falls back to the
 * first column when none of the candidates match.
 */
function findDefault(columns, candidates) {
  const match = candidates.find((candidate) => columns.includes(candidate));
  return match !== undefined ? match : columns[0];
}
|
|
367
|
+
|
|
368
|
+
/** Like findDefault but returns null if no match (no fallback) */
function findColumn(columns, candidates) {
  const hit = candidates.find((candidate) => columns.includes(candidate));
  return hit !== undefined ? hit : null;
}
|
|
375
|
+
|
|
376
|
+
/**
 * Columns that look like free-text content, in their original column order.
 * Used to pre-select a sensible default in the interactive wizard.
 */
function findContentDefaults(columns) {
  const contentLike = new Set(['Content', 'content', 'Text', 'text', 'Body', 'body']);
  return columns.filter((column) => contentLike.has(column));
}
|
|
380
|
+
|
|
381
|
+
/**
 * Make a string safe to use as a filename: replace path separators and
 * other reserved characters with '-', collapse whitespace runs to '-',
 * and cap the length at 200 characters.
 */
function sanitizeFilename(name) {
  const noReserved = name.replace(/[/\\?%*:|"<>]/g, '-');
  const noWhitespace = noReserved.replace(/\s+/g, '-');
  return noWhitespace.substring(0, 200);
}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import { readFile, writeFile, mkdir } from 'fs/promises';
|
|
2
|
+
import { join } from 'path';
|
|
3
|
+
|
|
4
|
+
// Where the cached bin hierarchy is persisted (relative to project root).
const STRUCTURE_FILE = '.dbo/structure.json';

/** All bin-placed files go under this directory at project root */
export const BINS_DIR = 'Bins';

/** Default top-level directories created at project root during clone */
export const DEFAULT_PROJECT_DIRS = [
  BINS_DIR,
  'App Versions',
  'Documentation',
  'Sites',
  'Extensions',
  'Data Sources',
];
|
|
18
|
+
|
|
19
|
+
/**
 * Build a bin hierarchy from an array of bin objects.
 * Filters by targetAppId and resolves full directory paths via ParentBinID traversal.
 *
 * The Path field may already contain the full path from root (e.g. "assets/css/vendor")
 * or just a segment name. We use the last path segment as the directory segment and
 * build full paths by walking up the ParentBinID chain, since Path can be ambiguous.
 *
 * Circular ParentBinID chains (bad server data) are guarded against: the bin
 * that closes a cycle is treated as a root instead of recursing forever.
 *
 * Returns a map: { [BinID]: { name, path, segment, parentBinID, binId, uid, fullPath } }
 */
export function buildBinHierarchy(bins, targetAppId) {
  if (!Array.isArray(bins) || bins.length === 0) return {};

  // Filter by AppID
  const filtered = targetAppId
    ? bins.filter(b => b.AppID === targetAppId)
    : bins;

  // Build lookup by BinID
  const byId = {};
  for (const bin of filtered) {
    // Use Path as the segment name, but only the last part if it contains slashes
    // (Path often stores the full path, e.g. "assets/css/vendor" for a bin named "vendor")
    const rawPath = bin.Path || bin.Name;
    const segment = rawPath.includes('/') ? rawPath.split('/').pop() : rawPath;

    byId[bin.BinID] = {
      name: bin.Name,
      path: bin.Path,
      segment,
      parentBinID: bin.ParentBinID || null,
      binId: bin.BinID,
      uid: bin.UID,
      fullPath: null,
    };
  }

  // Resolve full paths by walking up the parent chain using segment names.
  // `visiting` holds the String()-normalized BinIDs on the current walk
  // (Object.keys yields strings while ParentBinID may be numeric), so a
  // circular chain is detected instead of recursing without bound.
  function resolvePath(binId, visiting = new Set()) {
    const entry = byId[binId];
    if (!entry) return null;
    if (entry.fullPath !== null) return entry.fullPath;

    const key = String(binId);
    if (visiting.has(key)) {
      // Cycle detected — break it by treating this bin as a root.
      entry.fullPath = entry.segment;
      return entry.fullPath;
    }
    visiting.add(key);

    if (entry.parentBinID && byId[entry.parentBinID]) {
      const parentPath = resolvePath(entry.parentBinID, visiting);
      // The recursive call may have already fixed this entry while
      // breaking a cycle — keep that result.
      if (entry.fullPath !== null) return entry.fullPath;
      entry.fullPath = parentPath ? `${parentPath}/${entry.segment}` : entry.segment;
    } else {
      entry.fullPath = entry.segment;
    }
    return entry.fullPath;
  }

  for (const binId of Object.keys(byId)) {
    resolvePath(binId);
  }

  return byId;
}
|
|
77
|
+
|
|
78
|
+
/**
 * Lookup the full directory path for a BinID in the structure map.
 * Returns path prefixed with bins/ (e.g. "bins/app/assets/css").
 */
export function resolveBinPath(binId, structure) {
  const entry = structure[binId];
  if (!entry) return null;
  return `${BINS_DIR}/${entry.fullPath}`;
}
|
|
86
|
+
|
|
87
|
+
/**
 * Create all directories from the bin structure under bins/.
 * Returns the sorted list of directory paths that were created.
 */
export async function createDirectories(structure) {
  const dirPaths = Object.values(structure)
    .map((entry) => `${BINS_DIR}/${entry.fullPath}`)
    .filter(Boolean);
  dirPaths.sort(); // lexicographic order puts parents before children

  for (const dirPath of dirPaths) {
    await mkdir(dirPath, { recursive: true });
  }

  return dirPaths;
}
|
|
102
|
+
|
|
103
|
+
/**
 * Save the bin structure to .dbo/structure.json.
 * Ensures the .dbo directory exists first.
 */
export async function saveStructureFile(structure) {
  await mkdir('.dbo', { recursive: true });
  const body = JSON.stringify(structure, null, 2) + '\n';
  await writeFile(STRUCTURE_FILE, body);
}
|
|
110
|
+
|
|
111
|
+
/**
 * Load bin structure from .dbo/structure.json.
 * Returns an empty object when the file is missing or unparsable.
 */
export async function loadStructureFile() {
  try {
    return JSON.parse(await readFile(STRUCTURE_FILE, 'utf8'));
  } catch {
    return {};
  }
}
|
|
122
|
+
|
|
123
|
+
/**
 * Find the bin name for a given BinID from the structure.
 * Returns null when the BinID is not in the structure.
 */
export function getBinName(binId, structure) {
  const entry = structure[binId];
  if (!entry) return null;
  return entry.name;
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { utimes } from 'fs/promises';
|
|
2
|
+
|
|
3
|
+
/**
 * Parse a server date string as being in the server's timezone.
 * Server dates come as ISO-like strings (e.g. "2025-01-10T09:36:19Z")
 * but the actual time is in the server's local timezone, not UTC.
 *
 * If serverTz is null/undefined, treats the date as UTC (no conversion).
 * Returns a proper Date object in UTC, or null for missing/unparsable input.
 */
export function parseServerDate(dateStr, serverTz) {
  if (!dateStr) return null;

  // Strip trailing Z — the date may not actually be UTC
  const withoutZ = String(dateStr).replace(/Z$/, '');
  const asUtc = new Date(`${withoutZ}Z`);
  if (Number.isNaN(asUtc.getTime())) return null;

  // If no server timezone specified, treat as UTC
  if (!serverTz || serverTz === 'UTC') return asUtc;

  // Find the offset: render this UTC instant as wall-clock time in the
  // server timezone, then the difference between that wall-clock reading
  // and the instant itself is the zone offset to undo.
  const formatter = new Intl.DateTimeFormat('en-US', {
    timeZone: serverTz,
    year: 'numeric', month: '2-digit', day: '2-digit',
    hour: '2-digit', minute: '2-digit', second: '2-digit',
    hour12: false,
  });
  const fields = {};
  for (const part of formatter.formatToParts(asUtc)) {
    fields[part.type] = part.value;
  }

  const wallClock = new Date(
    `${fields.year}-${fields.month}-${fields.day}T${fields.hour}:${fields.minute}:${fields.second}Z`
  );
  const offsetMs = wallClock.getTime() - asUtc.getTime();

  return new Date(asUtc.getTime() - offsetMs);
}
|
|
38
|
+
|
|
39
|
+
/**
 * Set a file's Created (birthtime) and Modified (mtime) timestamps
 * based on server _CreatedOn and _LastUpdated values.
 *
 * On macOS: setting mtime to a time before the current birthtime
 * causes the OS to update birthtime to match. So we:
 *   1. First set mtime to _CreatedOn (the earlier date) → updates birthtime
 *   2. Then set mtime to _LastUpdated (the later date) → birthtime stays
 *
 * Created ← _CreatedOn, Modified ← _LastUpdated
 */
export async function setFileTimestamps(filePath, createdOn, lastUpdated, serverTz) {
  const createdDate = parseServerDate(createdOn, serverTz);
  const modifiedDate = parseServerDate(lastUpdated, serverTz);

  // Nothing parseable — leave the file's timestamps alone.
  if (!createdDate && !modifiedDate) return;

  // Pass 1: stamp the created date (earlier) so birthtime follows it.
  if (createdDate) {
    await utimes(filePath, createdDate, createdDate);
  }

  // Pass 2: stamp the modified date (later); birthtime stays at created.
  if (modifiedDate) {
    const accessTime = createdDate ?? modifiedDate;
    await utimes(filePath, accessTime, modifiedDate);
  }
}
|