slice-machine-ui 2.19.1 → 2.19.2-alpha.lg-import-slices.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/out/404.html +1 -1
- package/out/_next/static/Q_2lm2CqEzK8hMGWeR30R/_buildManifest.js +1 -0
- package/out/_next/static/chunks/130-e1a16d2f94fb2b64.js +1 -0
- package/out/_next/static/chunks/{429-aab52070cad2884b.js → 429-e5d7e39160de9f5e.js} +1 -1
- package/out/_next/static/chunks/489-d6580011169630a1.js +1 -0
- package/out/_next/static/chunks/585-c89bb2471e85b9f8.js +1 -0
- package/out/_next/static/chunks/928-ebbe58b08e1e70b1.js +1 -0
- package/out/_next/static/chunks/954-bedaaabf664584a0.js +1 -0
- package/out/_next/static/chunks/pages/{_app-664e26e8e0083aaa.js → _app-a8345a65a46d8112.js} +1 -1
- package/out/_next/static/chunks/pages/custom-types/{[customTypeId]-af9376721beb489e.js → [customTypeId]-97120b65616617fb.js} +1 -1
- package/out/_next/static/chunks/pages/page-types/{[pageTypeId]-a24665e91b882169.js → [pageTypeId]-b358358b5c2c1c43.js} +1 -1
- package/out/_next/static/chunks/pages/slices/[lib]/[sliceName]/[variation]/{simulator-faeb6d2f77d97096.js → simulator-8c70298caf51bed0.js} +1 -1
- package/out/_next/static/chunks/pages/slices-d5a2b0fcb7490565.js +1 -0
- package/out/changelog.html +1 -1
- package/out/changes.html +1 -1
- package/out/custom-types/[customTypeId].html +1 -1
- package/out/custom-types.html +1 -1
- package/out/index.html +1 -1
- package/out/labs.html +1 -1
- package/out/page-types/[pageTypeId].html +1 -1
- package/out/slices/[lib]/[sliceName]/[variation]/simulator.html +1 -1
- package/out/slices/[lib]/[sliceName]/[variation].html +1 -1
- package/out/slices.html +1 -1
- package/package.json +5 -4
- package/src/features/customTypes/customTypesBuilder/CreateSliceFromImageModal/CreateSliceFromImageModal.tsx +3 -36
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/ImportSlicesFromLibraryModal.tsx +291 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/SliceCard.tsx +48 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/hooks/useImportSlicesFromGithub.ts +93 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/index.tsx +1 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/types.ts +28 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/addSlices.ts +193 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/github.ts +630 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/mapWithConcurrency.ts +28 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/sliceWithoutConflicts.ts +51 -0
- package/src/features/customTypes/customTypesBuilder/SliceZoneBlankSlate.tsx +11 -0
- package/src/features/customTypes/customTypesBuilder/shared/getSubmitButtonLabel.ts +12 -0
- package/src/features/customTypes/customTypesBuilder/shared/useExistingSlices.ts +26 -0
- package/src/features/customTypes/customTypesBuilder/sliceCreationOptions.tsx +14 -0
- package/src/legacy/lib/builders/CustomTypeBuilder/SliceZone/index.tsx +51 -0
- package/src/pages/slices.tsx +30 -0
- package/out/_next/static/chunks/422-c9192a1dbdd2ae0e.js +0 -1
- package/out/_next/static/chunks/489-ce3053e1d81ade83.js +0 -1
- package/out/_next/static/chunks/907-180eb33eefccc237.js +0 -1
- package/out/_next/static/chunks/pages/slices-81c1c3f1bcad60f4.js +0 -1
- package/out/_next/static/mWW0JPKbrqF9bfSpOlAsb/_buildManifest.js +0 -1
- package/out/_next/static/{mWW0JPKbrqF9bfSpOlAsb → Q_2lm2CqEzK8hMGWeR30R}/_ssgManifest.js +0 -0
package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/github.ts
ADDED
@@ -0,0 +1,630 @@
+import { SharedSliceContent } from "@prismicio/types-internal/lib/content";
+import { SharedSlice } from "@prismicio/types-internal/lib/customtypes";
+import { z } from "zod";
+
+import { SliceFile, SliceImport } from "../types";
+import { mapWithConcurrency } from "./mapWithConcurrency";
+
+export const parseGithubUrl = (
+  githubUrl: string,
+): {
+  owner: string;
+  repo: string;
+} => {
+  const urlMatch = githubUrl.match(/github\.com\/([^\/]+)\/([^\/]+)(?:\/|$)/);
+  if (!urlMatch) {
+    throw new Error("Invalid GitHub URL format");
+  }
+  const [, owner, repoRaw] = urlMatch;
+  const repo = repoRaw.replace(/\.git$/, "");
+  return { owner, repo };
+};
+
+export const getDefaultBranch = async ({
+  owner,
+  repo,
+}: {
+  owner: string;
+  repo: string;
+}) => {
+  const rawUrl = `https://api.github.com/repos/${owner}/${repo}`;
+  const response = await fetch(rawUrl);
+
+  if (!response.ok) {
+    throw new Error(`Failed to fetch branches: ${response.statusText}`);
+  }
+  const json = z
+    .object({ default_branch: z.string() })
+    .parse(await response.json());
+
+  return json.default_branch;
+};
+
+export const getSliceLibraries = async ({
+  owner,
+  repo,
+  branch,
+}: {
+  owner: string;
+  repo: string;
+  branch: string;
+}) => {
+  const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/slicemachine.config.json`;
+  const response = await fetch(rawUrl);
+
+  if (!response.ok) {
+    throw new Error(
+      `Failed to fetch slicemachine.config.json: ${response.statusText}`,
+    );
+  }
+  const json = z
+    .object({ libraries: z.array(z.string()) })
+    .parse(await response.json());
+
+  let libraries = json.libraries;
+
+  if (libraries.length === 0) {
+    // Fallback: single Contents API call (defaults to default branch if no ref is provided)
+    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/slicemachine.config.json`;
+    const response = await fetch(apiUrl);
+
+    if (response.ok) {
+      const data = z
+        .object({ content: z.string().optional() })
+        .parse(await response.json());
+      if (typeof data.content === "string") {
+        const decodedContent = atob(data.content.replace(/\s/g, ""));
+        const config = z
+          .object({ libraries: z.array(z.string()) })
+          .parse(JSON.parse(decodedContent));
+        libraries = config.libraries;
+      }
+    } else {
+      throw new Error(`Failed to fetch the SM config: ${response.statusText}`);
+    }
+  }
+
+  return libraries;
+};
+
+export const fetchSlicesFromLibraries = async ({
+  owner,
+  repo,
+  branch,
+  libraries,
+}: {
+  owner: string;
+  repo: string;
+  branch: string;
+  libraries: string[];
+}) => {
+  const fetchedSlices: SliceImport[] = [];
+
+  console.log(
+    `Fetching slices from ${libraries.length} library/libraries:`,
+    libraries,
+  );
+
+  for (const libraryPath of libraries) {
+    // Normalize library path (remove leading ./ if present)
+    const normalizedPath = libraryPath.replace(/^\.\//, "");
+
+    let sliceDirectories: Array<{
+      name: string;
+      path: string;
+    }> = [];
+
+    // Try GitHub API first
+    const libraryApiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${normalizedPath}`;
+    let apiFailed = false;
+
+    try {
+      const libraryResponse = await fetch(libraryApiUrl, {
+        headers: {
+          Accept: "application/vnd.github.v3+json",
+        },
+      });
+
+      if (libraryResponse.ok) {
+        const libraryContents = z
+          .array(
+            z.object({
+              name: z.string(),
+              type: z.string(),
+              path: z.string(),
+            }),
+          )
+          .parse(await libraryResponse.json());
+        sliceDirectories = libraryContents
+          .filter((item) => item.type === "dir")
+          .map((item) => ({
+            name: item.name,
+            path: item.path,
+          }));
+      } else if (libraryResponse.status === 403) {
+        apiFailed = true;
+        console.warn(
+          `GitHub API returned 403 for ${libraryPath}, trying direct discovery...`,
+        );
+      } else {
+        console.warn(
+          `Failed to fetch library directory: ${libraryPath}`,
+          libraryResponse.statusText,
+        );
+        continue;
+      }
+    } catch (error) {
+      apiFailed = true;
+      console.warn(
+        `GitHub API error for ${libraryPath}, trying direct discovery...`,
+        error instanceof Error ? error.message : String(error),
+      );
+    }
+
+    // If API failed, use GitHub Search API to find all model.json files in this library path
+    if (apiFailed && sliceDirectories.length === 0) {
+      console.log(
+        `Attempting to discover slices using GitHub Search API for ${libraryPath}...`,
+      );
+
+      try {
+        // Use GitHub Search API to find all model.json files in the library path
+        // Format: q=path:libraryPath filename:model.json repo:owner/repo
+        const searchQuery = `path:${normalizedPath} filename:model.json repo:${owner}/${repo}`;
+        const searchUrl = `https://api.github.com/search/code?q=${encodeURIComponent(
+          searchQuery,
+        )}`;
+
+        const searchResponse = await fetch(searchUrl, {
+          headers: {
+            Accept: "application/vnd.github.v3+json",
+          },
+        });
+
+        if (searchResponse.ok) {
+          const searchData = (await searchResponse.json()) as {
+            items?: Array<{
+              path: string;
+              name: string;
+            }>;
+            total_count?: number;
+          };
+
+          if (searchData.items && searchData.items.length > 0) {
+            // Extract slice directory names from the paths
+            // Path format: slices/marketing/slice-name/model.json
+            const foundSlices = new Set<string>();
+            for (const item of searchData.items) {
+              // Extract the slice directory name from the path
+              // e.g., "slices/marketing/hero/model.json" -> "hero"
+              const pathParts = item.path.split("/");
+              // The slice name should be the second-to-last part (before "model.json")
+              if (pathParts.length >= 2) {
+                const sliceName = pathParts[pathParts.length - 2];
+                if (sliceName && !foundSlices.has(sliceName)) {
+                  foundSlices.add(sliceName);
+                }
+              }
+            }
+
+            // Convert to slice directories format
+            sliceDirectories = Array.from(foundSlices).map((sliceName) => ({
+              name: sliceName,
+              path: `${normalizedPath}/${sliceName}`,
+            }));
+
+            console.log(
+              `Discovered ${sliceDirectories.length} slice(s) via GitHub Search API for library ${libraryPath}`,
+            );
+          } else {
+            console.warn(
+              `GitHub Search API found no model.json files in ${libraryPath}`,
+            );
+          }
+        } else if (searchResponse.status === 403) {
+          console.warn(
+            `GitHub Search API also returned 403. Cannot discover slices without API access.`,
+          );
+        } else {
+          console.warn(
+            `GitHub Search API failed: ${searchResponse.statusText}`,
+          );
+        }
+      } catch (error) {
+        console.warn(
+          `Error using GitHub Search API for ${libraryPath}:`,
+          error instanceof Error ? error.message : String(error),
+        );
+      }
+    }
+
+    if (sliceDirectories.length === 0) {
+      console.warn(
+        `No slices found in library ${libraryPath}. The repository may be private or require authentication.`,
+      );
+      continue;
+    }
+
+    console.log(
+      `Processing ${sliceDirectories.length} slice(s) in library ${libraryPath}`,
+    );
+
+    // Fetch each slice's model.json, screenshot, and all other files with bounded concurrency
+    const perSlice = async (sliceDir: { name: string; path: string }) => {
+      try {
+        const modelUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${sliceDir.path}/model.json`;
+        const modelResponse = await fetch(modelUrl);
+
+        if (!modelResponse.ok) {
+          console.warn(
+            `Failed to fetch model.json for slice: ${sliceDir.name}`,
+          );
+          return;
+        }
+
+        const modelResult = SharedSlice.decode(await modelResponse.json());
+        if (modelResult._tag === "Left") {
+          console.warn(
+            `Failed to decode model.json for slice: ${sliceDir.name}`,
+          );
+          return;
+        }
+        const model = modelResult.right;
+
+        // Fetch all files from the slice directory
+        // Wrap in try-catch to prevent failures from blocking other slices
+        let sliceFiles: SliceFile[] = [];
+        try {
+          sliceFiles = await fetchAllFilesFromDirectory({
+            owner,
+            repo,
+            branch,
+            directoryPath: sliceDir.path,
+          });
+          console.log(
+            `Fetched ${sliceFiles.length} file(s) for slice ${sliceDir.name}:`,
+            sliceFiles.map((f) => `${f.path}${f.isBinary ? " (binary)" : ""}`),
+          );
+        } catch (error) {
+          console.warn(
+            `Failed to fetch files for slice ${sliceDir.name}:`,
+            error instanceof Error ? error.message : String(error),
+          );
+          // Continue with empty sliceFiles array
+        }
+
+        // Extract component contents and mocks
+        let componentContents: string | undefined;
+        let mocks: SharedSliceContent[] | undefined;
+
+        for (const file of sliceFiles) {
+          if (
+            componentContents === undefined &&
+            file.path.match(/^index\.(tsx?|jsx?|vue|svelte)$/)
+          ) {
+            if (typeof file.contents === "string") {
+              componentContents = file.contents;
+            }
+          }
+
+          if (file.path === "mocks.json" && typeof file.contents === "string") {
+            try {
+              const parsedMocksResult = SharedSliceContent.decode(
+                JSON.parse(file.contents),
+              );
+              if (parsedMocksResult._tag === "Left") {
+                console.warn(
+                  `Failed to decode mocks.json for slice: ${sliceDir.name}`,
+                );
+              } else {
+                const parsedMocks = parsedMocksResult.right;
+                if (Array.isArray(parsedMocks) && parsedMocks.length > 0) {
+                  mocks = parsedMocks;
+                }
+              }
+            } catch {
+              console.warn(
+                `Failed to decode mocks.json for slice: ${sliceDir.name}`,
+              );
+            }
+          }
+        }
+
+        // Fetch screenshots for all variations (usually few); keep unbounded or lightly bounded
+        let thumbnailUrl: string | undefined;
+        let screenshotFile: File | undefined;
+        const screenshots: Record<string, File> = {};
+
+        if (model.variations !== undefined && model.variations.length > 0) {
+          const screenshotResults = await Promise.allSettled(
+            model.variations.map(async (variation) => {
+              try {
+                const screenshotUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${sliceDir.path}/screenshot-${variation.id}.png`;
+                const screenshotResponse = await fetch(screenshotUrl);
+
+                if (screenshotResponse.ok) {
+                  const blob = await screenshotResponse.blob();
+                  const file = new File(
+                    [blob],
+                    `screenshot-${variation.id}.png`,
+                    {
+                      type: "image/png",
+                    },
+                  );
+                  screenshots[variation.id] = file;
+
+                  if (
+                    thumbnailUrl === undefined &&
+                    model.variations[0] !== undefined &&
+                    variation.id === model.variations[0].id
+                  ) {
+                    thumbnailUrl = URL.createObjectURL(blob);
+                    screenshotFile = file;
+                  }
+                }
+              } catch (error) {
+                throw error;
+              }
+            }),
+          );
+
+          screenshotResults.forEach((result, index) => {
+            if (result.status === "rejected") {
+              console.warn(
+                `Failed to fetch screenshot for variation ${model.variations[index]?.id}:`,
+                result.reason instanceof Error
+                  ? result.reason.message
+                  : String(result.reason),
+              );
+            }
+          });
+        }
+
+        const backupImageFile =
+          screenshotFile ??
+          new File([], `${model.name}.json`, {
+            type: "application/json",
+          });
+
+        const sliceData: SliceImport = {
+          image: backupImageFile,
+          thumbnailUrl: thumbnailUrl ?? URL.createObjectURL(backupImageFile),
+          model,
+          files: sliceFiles,
+          componentContents,
+          mocks,
+          screenshots:
+            Object.keys(screenshots).length > 0 ? screenshots : undefined,
+        };
+        fetchedSlices.push(sliceData);
+      } catch (error) {
+        console.warn(
+          `Error fetching slice ${sliceDir.name}:`,
+          error instanceof Error ? error.message : String(error),
+        );
+      }
+    };
+
+    // Process slice directories with a concurrency cap to avoid API throttling
+    await mapWithConcurrency(sliceDirectories, 6, perSlice);
+  }
+
+  return fetchedSlices;
+};
+
+/**
+ * Recursively fetches all files from a GitHub directory
+ */
+const fetchAllFilesFromDirectory = async (args: {
+  owner: string;
+  repo: string;
+  branch: string;
+  directoryPath: string;
+}): Promise<SliceFile[]> => {
+  const { owner, repo, branch, directoryPath } = args;
+  const files: SliceFile[] = [];
+
+  // Try GitHub API first
+  let apiWorked = false;
+  try {
+    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${directoryPath}`;
+    const response = await fetch(apiUrl);
+
+    if (response.ok) {
+      apiWorked = true;
+      const contents = (await response.json()) as Array<{
+        name: string;
+        type: string;
+        path: string;
+        content?: string;
+        encoding?: string;
+      }>;
+
+      const fileItems = contents.filter((i) => i.type === "file");
+      const dirItems = contents.filter((i) => i.type === "dir");
+
+      // Process files with bounded concurrency
+      const fileResults = await mapWithConcurrency(
+        fileItems,
+        8,
+        async (item) => {
+          if (item.name === "model.json") return null;
+          const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${item.path}`;
+          const fileResponse = await fetch(rawUrl);
+          if (!fileResponse.ok) return null;
+
+          const binaryExtensions = [
+            ".png",
+            ".jpg",
+            ".jpeg",
+            ".gif",
+            ".svg",
+            ".ico",
+            ".webp",
+          ];
+          const isBinaryFile = binaryExtensions.some((ext) =>
+            item.name.toLowerCase().endsWith(ext),
+          );
+          let fileContents: string | ArrayBuffer;
+          if (isBinaryFile) {
+            const arrayBuffer = await fileResponse.arrayBuffer();
+            fileContents = arrayBuffer;
+          } else {
+            fileContents = await fileResponse.text();
+          }
+          return {
+            path: item.name,
+            contents: fileContents,
+            isBinary: isBinaryFile,
+          } as SliceFile;
+        },
+      );
+      for (const r of fileResults) {
+        if (r) files.push(r);
+      }
+
+      // Recursively process directories sequentially (counts are usually low)
+      for (const item of dirItems) {
+        const subFiles = await fetchAllFilesFromDirectory({
+          owner,
+          repo,
+          branch,
+          directoryPath: item.path,
+        });
+        for (const subFile of subFiles) {
+          files.push({
+            ...subFile,
+            path: `${item.name}/${subFile.path}`,
+          });
+        }
+      }
+    }
+  } catch (error) {
+    console.warn(
+      `GitHub API failed for directory ${directoryPath}, trying HTML parsing...`,
+      error instanceof Error ? error.message : String(error),
+    );
+  }
+
+  // If API failed, use GitHub Search API to find all files recursively
+  if (!apiWorked) {
+    try {
+      console.log(
+        `Using GitHub Search API to find all files in ${directoryPath}...`,
+      );
+
+      // Use GitHub Search API to find all files in this directory (recursively)
+      // Format: path:directoryPath repo:owner/repo
+      const searchQuery = `path:${directoryPath} repo:${owner}/${repo}`;
+      const searchUrl = `https://api.github.com/search/code?q=${encodeURIComponent(
+        searchQuery,
+      )}`;
+
+      const searchResponse = await fetch(searchUrl, {
+        headers: {
+          Accept: "application/vnd.github.v3+json",
+        },
+      });
+
+      if (searchResponse.ok) {
+        const searchData = (await searchResponse.json()) as {
+          items?: Array<{
+            path: string;
+            name: string;
+          }>;
+          total_count?: number;
+        };
+
+        if (searchData.items && searchData.items.length > 0) {
+          console.log(
+            `Found ${searchData.items.length} file(s) via Search API for ${directoryPath}`,
+          );
+
+          // Fetch all discovered files
+          // Note: Search API returns up to 100 results per page, but we should get all files
+          const fetched = await mapWithConcurrency(
+            searchData.items,
+            8,
+            async (item) => {
+              try {
+                if (item.name === "model.json") return null;
+                const relativePath = item.path.startsWith(directoryPath + "/")
+                  ? item.path.slice(directoryPath.length + 1)
+                  : item.name;
+                const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${item.path}`;
+                const fileResponse = await fetch(rawUrl);
+                if (!fileResponse.ok) {
+                  console.warn(
+                    `Failed to fetch file ${item.path}: ${fileResponse.status} ${fileResponse.statusText}`,
+                  );
+                  return null;
+                }
+                const binaryExtensions = [
+                  ".png",
+                  ".jpg",
+                  ".jpeg",
+                  ".gif",
+                  ".svg",
+                  ".ico",
+                  ".webp",
+                  ".woff",
+                  ".woff2",
+                  ".ttf",
+                  ".eot",
+                  ".otf",
+                  ".pdf",
+                  ".zip",
+                  ".gz",
+                ];
+                const isBinaryFile = binaryExtensions.some((ext) =>
+                  item.name.toLowerCase().endsWith(ext),
+                );
+                let fileContents: string | ArrayBuffer;
+                if (isBinaryFile) {
+                  fileContents = await fileResponse.arrayBuffer();
+                } else {
+                  fileContents = await fileResponse.text();
+                }
+                return {
+                  path: relativePath,
+                  contents: fileContents,
+                  isBinary: isBinaryFile,
+                } as SliceFile | null;
+              } catch (error) {
+                console.warn(
+                  `Error fetching file ${item.path}:`,
+                  error instanceof Error ? error.message : String(error),
+                );
+                return null;
+              }
+            },
+          );
+          for (const item of fetched) {
+            if (item) files.push(item);
+          }
+
+          console.log(
+            `Fetched ${files.length} file(s) from ${directoryPath} via Search API`,
+          );
+        } else {
+          console.warn(`GitHub Search API found no files in ${directoryPath}`);
+        }
+      } else if (searchResponse.status === 403) {
+        console.warn(
+          `GitHub Search API returned 403 for ${directoryPath}. Cannot fetch files without API access.`,
+        );
+      } else {
+        console.warn(
+          `GitHub Search API failed for ${directoryPath}: ${searchResponse.statusText}`,
+        );
+      }
+    } catch (error) {
+      console.warn(
+        `Error using GitHub Search API for ${directoryPath}:`,
+        error instanceof Error ? error.message : String(error),
+      );
+    }
+  }
+
+  return files;
+};
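For orientation only, a minimal sketch of how these exported helpers could be composed; the repository URL is a placeholder and the wiring is not taken from the package itself.

// Illustrative sketch (not from the package): importing from a public repository.
import {
  parseGithubUrl,
  getDefaultBranch,
  getSliceLibraries,
  fetchSlicesFromLibraries,
} from "./github";

export async function importSlicesFromRepo(githubUrl: string) {
  const { owner, repo } = parseGithubUrl(githubUrl);
  const branch = await getDefaultBranch({ owner, repo });
  const libraries = await getSliceLibraries({ owner, repo, branch });
  // Returns SliceImport[]: model, files, optional mocks and screenshots per slice.
  return fetchSlicesFromLibraries({ owner, repo, branch, libraries });
}

// e.g. importSlicesFromRepo("https://github.com/example-org/example-repo")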
package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/mapWithConcurrency.ts
ADDED
@@ -0,0 +1,28 @@
+/**
+ * Concurrency helper to control parallel network/IO without external deps
+ * @param items - The items to map over
+ * @param limit - The maximum number of concurrent operations
+ * @param mapper - The function to map over the items
+ * @returns The results of the mapped items
+ */
+export async function mapWithConcurrency<T, R>(
+  items: readonly T[],
+  limit: number,
+  mapper: (item: T, index: number) => Promise<R>,
+): Promise<R[]> {
+  const results = new Array<R>(items.length);
+  let nextIndex = 0;
+
+  const workers = new Array(Math.min(limit, items.length))
+    .fill(null)
+    .map(async () => {
+      while (true) {
+        const currentIndex = nextIndex++;
+        if (currentIndex >= items.length) break;
+        results[currentIndex] = await mapper(items[currentIndex], currentIndex);
+      }
+    });
+
+  await Promise.all(workers);
+  return results;
+}
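A brief usage sketch of the helper above; the URLs and wrapper function are placeholders, not code from the package.

// Illustrative sketch (not from the package): fetch URLs with at most 4 requests in flight.
import { mapWithConcurrency } from "./mapWithConcurrency";

async function fetchAllText(urls: readonly string[]): Promise<string[]> {
  return mapWithConcurrency(urls, 4, async (url) => {
    const response = await fetch(url);
    return response.text();
  });
}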
package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/sliceWithoutConflicts.ts
ADDED
@@ -0,0 +1,51 @@
+import { SharedSlice } from "@prismicio/types-internal/lib/customtypes";
+
+import { NewSlice } from "../types";
+
+/**
+ * If needed, assigns new ids and names to avoid conflicts with existing slices.
+ * Names are compared case-insensitively to avoid conflicts
+ * between folder names with different casing.
+ */
+export function sliceWithoutConflicts({
+  existingSlices,
+  newSlices,
+  slice,
+}: {
+  existingSlices: SharedSlice[];
+  newSlices: NewSlice[];
+  slice: SharedSlice;
+}): SharedSlice {
+  const existingIds = new Set<string>();
+  const existingNames = new Set<string>();
+
+  for (const { id, name } of existingSlices) {
+    existingIds.add(id);
+    existingNames.add(name.toLowerCase());
+  }
+
+  for (const s of newSlices) {
+    existingIds.add(s.model.id);
+    existingNames.add(s.model.name.toLowerCase());
+  }
+
+  let id = slice.id;
+  let counter = 2;
+  while (existingIds.has(id)) {
+    id = `${slice.id}_${counter}`;
+    counter++;
+  }
+
+  let name = slice.name;
+  counter = 2;
+  while (existingNames.has(name.toLowerCase())) {
+    name = `${slice.name}${counter}`;
+    counter++;
+  }
+
+  return {
+    ...slice,
+    id,
+    name,
+  };
+}
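A sketch of how this conflict resolution could be applied while building an import queue; `toNewSlice` and `buildImportQueue` are hypothetical names introduced here for illustration, and the real NewSlice construction lives elsewhere in the modal.

// Illustrative sketch (not from the package): resolve conflicts per imported model.
import { SharedSlice } from "@prismicio/types-internal/lib/customtypes";

import { NewSlice } from "../types";
import { sliceWithoutConflicts } from "./sliceWithoutConflicts";

// Hypothetical adapter from a resolved model to a NewSlice, declared only to keep the sketch typed.
declare function toNewSlice(model: SharedSlice): NewSlice;

function buildImportQueue(
  existingSlices: SharedSlice[],
  importedModels: SharedSlice[],
): NewSlice[] {
  const newSlices: NewSlice[] = [];
  for (const slice of importedModels) {
    // e.g. a second "hero" / "Hero" becomes id "hero_2" and name "Hero2".
    const resolved = sliceWithoutConflicts({ existingSlices, newSlices, slice });
    newSlices.push(toNewSlice(resolved));
  }
  return newSlices;
}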