tiendu 0.3.1 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -14
- package/bin/tiendu.js +6 -4
- package/bin/tiendu.mjs +1 -134
- package/lib/api.mjs +18 -50
- package/lib/archive.mjs +30 -0
- package/lib/assets.mjs +245 -0
- package/lib/build.mjs +299 -41
- package/lib/dev.mjs +239 -136
- package/lib/fs-utils.mjs +35 -0
- package/lib/local-preview.mjs +350 -0
- package/lib/postcss.mjs +166 -0
- package/lib/preview.mjs +19 -9
- package/lib/publish.mjs +12 -2
- package/lib/push.mjs +51 -52
- package/lib/retry.mjs +69 -0
- package/package.json +2 -2
package/lib/dev.mjs
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
import { watch } from "node:fs";
|
|
2
|
-
import { readFile,
|
|
2
|
+
import { readFile, stat } from "node:fs/promises";
|
|
3
3
|
import path from "node:path";
|
|
4
4
|
import * as p from "@clack/prompts";
|
|
5
|
-
import { zipSync } from "fflate";
|
|
6
5
|
import { loadConfigOrFail, writeConfig, isBuiltTheme, getDistDir } from "./config.mjs";
|
|
7
6
|
import {
|
|
7
|
+
buildPreviewUrl,
|
|
8
8
|
createPreview,
|
|
9
9
|
listPreviews,
|
|
10
10
|
resolveActivePreview,
|
|
@@ -12,45 +12,100 @@ import {
|
|
|
12
12
|
import {
|
|
13
13
|
deletePreviewFile,
|
|
14
14
|
uploadPreviewFileMultipart,
|
|
15
|
-
uploadPreviewZip,
|
|
16
15
|
} from "./api.mjs";
|
|
17
16
|
import { build } from "./build.mjs";
|
|
17
|
+
import { isDotfile } from "./fs-utils.mjs";
|
|
18
|
+
import { startLocalPreviewServer } from "./local-preview.mjs";
|
|
19
|
+
import { pushPreparedDirectoryToPreview } from "./push.mjs";
|
|
20
|
+
import { retryAsync } from "./retry.mjs";
|
|
18
21
|
|
|
19
|
-
const
|
|
20
|
-
|
|
21
|
-
const
|
|
22
|
-
const base = new URL(apiBaseUrl);
|
|
23
|
-
const hasExplicitPort = previewHostname.includes(":");
|
|
24
|
-
return `${base.protocol}//${previewHostname}${!hasExplicitPort && base.port ? `:${base.port}` : ""}/`;
|
|
25
|
-
};
|
|
22
|
+
const RETRY_ATTEMPTS = 3;
|
|
23
|
+
const MAX_SYNC_FILE_SIZE_BYTES = 20 * 1024 * 1024;
|
|
24
|
+
const IGNORED_ROOT_SEGMENTS = new Set(["node_modules", ".git"]);
|
|
26
25
|
|
|
27
26
|
// True when any segment of `relativePath` names a dotfile or dot-directory.
const hasDotfileSegment = (relativePath) => {
  for (const segment of relativePath.split(path.sep)) {
    if (isDotfile(segment)) return true;
  }
  return false;
};
|
|
29
28
|
|
|
30
|
-
const
|
|
31
|
-
const
|
|
32
|
-
const
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
29
|
+
// Decide whether a watcher event for `relativePath` should be dropped:
// paths touching ignored directories, the dist output (for non-built themes),
// and common editor temp/backup files.
const shouldIgnoreWatchedPath = (relativePath, builtTheme) => {
  const segments = relativePath.split(path.sep).join("/").split("/");
  const basename = segments[segments.length - 1] ?? "";

  const touchesIgnoredDir = segments.some((segment) =>
    IGNORED_ROOT_SEGMENTS.has(segment),
  );
  if (touchesIgnoredDir) return true;

  // For non-built themes, dist/ is not source — skip it.
  if (segments[0] === "dist" && !builtTheme) return true;

  if (basename.endsWith("~")) return true;
  return /\.(swp|tmp|temp)$/i.test(basename);
};
|
|
44
44
|
|
|
45
|
-
const
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
45
|
+
// A sync result is retriable only when it failed AND the API flagged it
// as a transient (retriable) failure.
const shouldRetrySyncResult = (result) => {
  if (result.ok) return false;
  return Boolean(result.retriable);
};
|
|
47
|
+
|
|
48
|
+
/**
 * Upload a single preview file, retrying transient failures with the shared
 * sync retry policy (RETRY_ATTEMPTS attempts, shouldRetrySyncResult gate).
 *
 * @returns {Promise} The retryAsync result of the final attempt.
 */
const uploadFileWithRetries = (
  apiBaseUrl,
  apiKey,
  storeId,
  previewKey,
  relativePath,
  content,
  onRetry,
) => {
  const attemptUpload = () =>
    uploadPreviewFileMultipart(
      apiBaseUrl,
      apiKey,
      storeId,
      previewKey,
      relativePath,
      content,
    );

  return retryAsync(attemptUpload, {
    attempts: RETRY_ATTEMPTS,
    shouldRetry: shouldRetrySyncResult,
    onRetry,
  });
};
|
|
73
|
+
|
|
74
|
+
/**
 * Delete a single preview file, retrying transient failures with the shared
 * sync retry policy (RETRY_ATTEMPTS attempts, shouldRetrySyncResult gate).
 *
 * @returns {Promise} The retryAsync result of the final attempt.
 */
const deleteFileWithRetries = (
  apiBaseUrl,
  apiKey,
  storeId,
  previewKey,
  relativePath,
  onRetry,
) => {
  const attemptDelete = () =>
    deletePreviewFile(apiBaseUrl, apiKey, storeId, previewKey, relativePath);

  return retryAsync(attemptDelete, {
    attempts: RETRY_ATTEMPTS,
    shouldRetry: shouldRetrySyncResult,
    onRetry,
  });
};
|
|
97
|
+
|
|
98
|
+
/**
 * Pick the preview to use for dev mode.
 *
 * @returns {{ preview: object|null, fallbackUsed: boolean }} The resolved
 *   preview (or null when none applies) and whether we fell back to the
 *   store's only preview because the configured key no longer matched.
 */
const resolvePreviewForDev = (previews, configuredPreviewKey) => {
  const matched = resolveActivePreview(previews, configuredPreviewKey);
  if (matched) return { preview: matched, fallbackUsed: false };

  // A stale configured key with exactly one preview available: use it,
  // but report the fallback so the caller can warn.
  const onlyPreview = previews.length === 1 ? previews[0] : null;
  if (configuredPreviewKey && onlyPreview) {
    return { preview: onlyPreview, fallbackUsed: true };
  }

  return { preview: null, fallbackUsed: false };
};
|
|
55
110
|
|
|
56
111
|
export const dev = async () => {
|
|
@@ -60,6 +115,7 @@ export const dev = async () => {
|
|
|
60
115
|
const builtTheme = await isBuiltTheme();
|
|
61
116
|
const rootDir = builtTheme ? getDistDir() : process.cwd();
|
|
62
117
|
let buildCleanup = null;
|
|
118
|
+
let localPreviewServer = null;
|
|
63
119
|
|
|
64
120
|
// For built themes, run the build first (with watch mode)
|
|
65
121
|
if (builtTheme) {
|
|
@@ -71,150 +127,197 @@ export const dev = async () => {
|
|
|
71
127
|
buildCleanup = buildResult.cleanup;
|
|
72
128
|
}
|
|
73
129
|
|
|
74
|
-
const
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
)
|
|
79
|
-
|
|
80
|
-
p.log.error(
|
|
130
|
+
const spinner = p.spinner();
|
|
131
|
+
spinner.start("Connecting to preview...");
|
|
132
|
+
|
|
133
|
+
const listResult = await listPreviews(apiBaseUrl, apiKey, storeId);
|
|
134
|
+
if (!listResult.ok) {
|
|
135
|
+
spinner.stop("Failed to connect.", 1);
|
|
136
|
+
p.log.error(listResult.error);
|
|
81
137
|
process.exit(1);
|
|
82
138
|
}
|
|
83
139
|
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
140
|
+
const previewResolution = resolvePreviewForDev(listResult.data, config.previewKey);
|
|
141
|
+
let activePreview = previewResolution.preview;
|
|
142
|
+
if (previewResolution.fallbackUsed && activePreview) {
|
|
143
|
+
p.log.warn(
|
|
144
|
+
`Stored preview ${config.previewKey} was not found. Using the only available preview ${activePreview.previewKey}.`,
|
|
145
|
+
);
|
|
146
|
+
}
|
|
88
147
|
|
|
89
|
-
if (!
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
148
|
+
if (!activePreview) {
|
|
149
|
+
if (config.previewKey) {
|
|
150
|
+
p.log.warn(
|
|
151
|
+
`Stored preview ${config.previewKey} was not found. Creating a new preview...`,
|
|
152
|
+
);
|
|
153
|
+
}
|
|
93
154
|
|
|
94
|
-
|
|
95
|
-
|
|
155
|
+
spinner.message("Creating preview...");
|
|
156
|
+
const previewResult = await createPreview(apiBaseUrl, apiKey, storeId, "Dev");
|
|
157
|
+
if (!previewResult.ok) {
|
|
96
158
|
spinner.stop("Failed to create preview.", 1);
|
|
97
|
-
p.log.error(
|
|
159
|
+
p.log.error(previewResult.error);
|
|
98
160
|
process.exit(1);
|
|
99
161
|
}
|
|
100
162
|
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
await writeConfig({ ...config, previewKey });
|
|
104
|
-
|
|
105
|
-
spinner.message("Uploading initial files...");
|
|
106
|
-
const zipBuffer = await createZipFromDirectory(rootDir);
|
|
107
|
-
const uploadResult = await uploadPreviewZip(
|
|
108
|
-
apiBaseUrl,
|
|
109
|
-
apiKey,
|
|
110
|
-
storeId,
|
|
111
|
-
previewKey,
|
|
112
|
-
zipBuffer,
|
|
113
|
-
);
|
|
163
|
+
activePreview = previewResult.data;
|
|
164
|
+
}
|
|
114
165
|
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
}
|
|
166
|
+
const previewKey = activePreview.previewKey;
|
|
167
|
+
if (config.previewKey !== previewKey) {
|
|
168
|
+
await writeConfig({ ...config, previewKey });
|
|
169
|
+
}
|
|
120
170
|
|
|
121
|
-
|
|
122
|
-
} else {
|
|
123
|
-
// ── Verify existing preview still exists ─────────────────────────────────
|
|
124
|
-
const spinner = p.spinner();
|
|
125
|
-
spinner.start("Connecting to preview...");
|
|
171
|
+
const previewUrl = buildPreviewUrl(apiBaseUrl, activePreview.previewHostname);
|
|
126
172
|
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
173
|
+
const uploadResult = await pushPreparedDirectoryToPreview({
|
|
174
|
+
apiBaseUrl,
|
|
175
|
+
apiKey,
|
|
176
|
+
storeId,
|
|
177
|
+
previewKey,
|
|
178
|
+
rootDir,
|
|
179
|
+
spinner,
|
|
180
|
+
packMessage: "Running initial push...",
|
|
181
|
+
retryMessage: (result, nextAttempt) =>
|
|
182
|
+
`Initial push failed. Retrying ${nextAttempt}/${RETRY_ATTEMPTS}... ${result.error}`,
|
|
183
|
+
});
|
|
133
184
|
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
? "No previews found for this store. A new preview will be created if you clear the local config and run tiendu dev again."
|
|
140
|
-
: "Run tiendu preview list and then set or recreate the preview.",
|
|
141
|
-
);
|
|
142
|
-
process.exit(1);
|
|
143
|
-
}
|
|
185
|
+
if (!uploadResult.ok) {
|
|
186
|
+
spinner.stop("Initial push failed.", 1);
|
|
187
|
+
p.log.error(uploadResult.error);
|
|
188
|
+
process.exit(1);
|
|
189
|
+
}
|
|
144
190
|
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
191
|
+
try {
|
|
192
|
+
localPreviewServer = await startLocalPreviewServer({
|
|
193
|
+
apiBaseUrl,
|
|
194
|
+
previewHostname: activePreview.previewHostname,
|
|
195
|
+
});
|
|
196
|
+
} catch (error) {
|
|
197
|
+
p.log.warn(`Could not start local live preview: ${error.message}`);
|
|
198
|
+
}
|
|
149
199
|
|
|
150
|
-
|
|
151
|
-
|
|
200
|
+
spinner.stop("Preview ready.");
|
|
201
|
+
if (localPreviewServer) {
|
|
202
|
+
p.log.message(`Local live preview: ${localPreviewServer.url}`);
|
|
152
203
|
}
|
|
204
|
+
p.log.message(`Sharable preview: ${previewUrl}`);
|
|
153
205
|
|
|
154
206
|
p.log.message("Watching for changes — press Ctrl+C to stop.");
|
|
155
207
|
|
|
156
208
|
// ── File watcher ──────────────────────────────────────────────────────────
|
|
157
209
|
/** @type {Map<string, NodeJS.Timeout>} */
|
|
158
210
|
const debounceMap = new Map();
|
|
211
|
+
const inFlightPaths = new Set();
|
|
212
|
+
const pendingResyncPaths = new Set();
|
|
159
213
|
const DEBOUNCE_MS = 300;
|
|
160
214
|
|
|
161
|
-
const
|
|
162
|
-
|
|
163
|
-
if (
|
|
164
|
-
|
|
165
|
-
const relativePath = filename.split(path.sep).join("/");
|
|
166
|
-
const existing = debounceMap.get(relativePath);
|
|
167
|
-
if (existing) clearTimeout(existing);
|
|
215
|
+
const queueSync = (relativePath) => {
|
|
216
|
+
const existingTimer = debounceMap.get(relativePath);
|
|
217
|
+
if (existingTimer) clearTimeout(existingTimer);
|
|
168
218
|
|
|
169
|
-
const timer = setTimeout(
|
|
219
|
+
const timer = setTimeout(() => {
|
|
170
220
|
debounceMap.delete(relativePath);
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
221
|
+
void syncPath(relativePath);
|
|
222
|
+
}, DEBOUNCE_MS);
|
|
223
|
+
|
|
224
|
+
debounceMap.set(relativePath, timer);
|
|
225
|
+
};
|
|
226
|
+
|
|
227
|
+
const syncPath = async (relativePath) => {
|
|
228
|
+
if (inFlightPaths.has(relativePath)) {
|
|
229
|
+
pendingResyncPaths.add(relativePath);
|
|
230
|
+
return;
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
inFlightPaths.add(relativePath);
|
|
234
|
+
|
|
235
|
+
try {
|
|
236
|
+
const absolutePath = path.join(rootDir, relativePath);
|
|
237
|
+
const fileStat = await stat(absolutePath).catch(() => null);
|
|
238
|
+
|
|
239
|
+
if (!fileStat || !fileStat.isFile()) {
|
|
240
|
+
if (!fileStat) {
|
|
241
|
+
console.log(`✕ ${relativePath}`);
|
|
242
|
+
const result = await deleteFileWithRetries(
|
|
243
|
+
apiBaseUrl,
|
|
244
|
+
apiKey,
|
|
245
|
+
storeId,
|
|
246
|
+
previewKey,
|
|
247
|
+
relativePath,
|
|
248
|
+
async (_, nextAttempt) => {
|
|
249
|
+
p.log.warn(
|
|
250
|
+
` Retry delete ${relativePath} (${nextAttempt}/${RETRY_ATTEMPTS})`,
|
|
251
|
+
);
|
|
252
|
+
},
|
|
253
|
+
);
|
|
254
|
+
|
|
255
|
+
if (!result.ok) {
|
|
256
|
+
p.log.warn(` Failed to delete after ${RETRY_ATTEMPTS} attempts: ${result.error}`);
|
|
257
|
+
} else {
|
|
258
|
+
localPreviewServer?.notifyReload();
|
|
189
259
|
}
|
|
190
|
-
return;
|
|
191
260
|
}
|
|
192
261
|
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
relativePath,
|
|
201
|
-
content,
|
|
262
|
+
return;
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
console.log(`↑ ${relativePath}`);
|
|
266
|
+
if (fileStat.size > MAX_SYNC_FILE_SIZE_BYTES) {
|
|
267
|
+
p.log.warn(
|
|
268
|
+
` Skipping ${relativePath}: file is ${(fileStat.size / (1024 * 1024)).toFixed(1)} MB (limit ${(MAX_SYNC_FILE_SIZE_BYTES / (1024 * 1024)).toFixed(0)} MB).`,
|
|
202
269
|
);
|
|
270
|
+
return;
|
|
271
|
+
}
|
|
203
272
|
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
273
|
+
const content = await readFile(absolutePath);
|
|
274
|
+
const result = await uploadFileWithRetries(
|
|
275
|
+
apiBaseUrl,
|
|
276
|
+
apiKey,
|
|
277
|
+
storeId,
|
|
278
|
+
previewKey,
|
|
279
|
+
relativePath,
|
|
280
|
+
content,
|
|
281
|
+
async (_, nextAttempt) => {
|
|
282
|
+
p.log.warn(
|
|
283
|
+
` Retry upload ${relativePath} (${nextAttempt}/${RETRY_ATTEMPTS})`,
|
|
284
|
+
);
|
|
285
|
+
},
|
|
286
|
+
);
|
|
287
|
+
|
|
288
|
+
if (!result.ok) {
|
|
289
|
+
p.log.warn(` Failed to upload after ${RETRY_ATTEMPTS} attempts: ${result.error}`);
|
|
290
|
+
} else {
|
|
291
|
+
localPreviewServer?.notifyReload();
|
|
209
292
|
}
|
|
210
|
-
}
|
|
293
|
+
} catch (error) {
|
|
294
|
+
p.log.warn(` Error processing ${relativePath}: ${error.message}`);
|
|
295
|
+
} finally {
|
|
296
|
+
inFlightPaths.delete(relativePath);
|
|
211
297
|
|
|
212
|
-
|
|
298
|
+
if (pendingResyncPaths.delete(relativePath)) {
|
|
299
|
+
queueSync(relativePath);
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
};
|
|
303
|
+
|
|
304
|
+
const watcher = watch(rootDir, { recursive: true }, (eventType, filename) => {
|
|
305
|
+
if (!filename) return;
|
|
306
|
+
if (hasDotfileSegment(filename)) return;
|
|
307
|
+
if (shouldIgnoreWatchedPath(filename, builtTheme)) return;
|
|
308
|
+
|
|
309
|
+
const relativePath = filename.split(path.sep).join("/");
|
|
310
|
+
queueSync(relativePath);
|
|
213
311
|
});
|
|
214
312
|
|
|
313
|
+
let cleanedUp = false;
|
|
215
314
|
const cleanup = async () => {
|
|
315
|
+
if (cleanedUp) return;
|
|
316
|
+
cleanedUp = true;
|
|
317
|
+
|
|
216
318
|
watcher.close();
|
|
217
319
|
for (const timer of debounceMap.values()) clearTimeout(timer);
|
|
320
|
+
if (localPreviewServer) await localPreviewServer.close();
|
|
218
321
|
if (buildCleanup) await buildCleanup();
|
|
219
322
|
p.outro("Dev mode stopped.");
|
|
220
323
|
process.exit(0);
|
package/lib/fs-utils.mjs
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { access, readdir } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// A dotfile is any name whose first character is "." ("" is not a dotfile).
export const isDotfile = (name) => name.charAt(0) === ".";
|
|
5
|
+
|
|
6
|
+
/**
 * Check whether `filePath` exists (any entry type) without throwing.
 *
 * @param {string} filePath - Path to probe.
 * @returns {Promise<boolean>} true when access succeeds, false otherwise.
 */
export const fileExists = (filePath) =>
  access(filePath).then(
    () => true,
    () => false,
  );
|
|
14
|
+
|
|
15
|
+
/**
 * Recursively list every file under `absoluteDir`, skipping dotfiles and
 * dot-directories at every level.
 *
 * @param {string} absoluteDir - Directory to walk (absolute path).
 * @returns {Promise<string[]>} Absolute file paths, sorted with localeCompare.
 */
export const listFilesRecursive = async (absoluteDir) => {
  const entries = await readdir(absoluteDir, { withFileTypes: true });

  // Recurse into subdirectories in parallel instead of awaiting each one
  // sequentially; the final sort keeps the output order deterministic.
  const nested = await Promise.all(
    entries
      .filter((entry) => !isDotfile(entry.name))
      .map(async (entry) => {
        const absolutePath = path.join(absoluteDir, entry.name);
        if (entry.isDirectory()) {
          return listFilesRecursive(absolutePath);
        }
        // Entries that are neither files nor directories (sockets, FIFOs,
        // symlinks, ...) are skipped, matching the original behavior.
        return entry.isFile() ? [absolutePath] : [];
      }),
  );

  return nested.flat().sort((left, right) => left.localeCompare(right));
};
|