slice-machine-ui 2.20.1-beta.5 → 2.20.2-alpha.lg-import-slices.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/out/404.html +1 -1
- package/out/_next/static/-rx3Dikh863L90KD25ytx/_buildManifest.js +1 -0
- package/out/_next/static/chunks/130-e1a16d2f94fb2b64.js +1 -0
- package/out/_next/static/chunks/422-10a9f299957417bb.js +1 -0
- package/out/_next/static/chunks/{429-aab52070cad2884b.js → 429-e5d7e39160de9f5e.js} +1 -1
- package/out/_next/static/chunks/489-c9535ef34da63d1a.js +1 -0
- package/out/_next/static/chunks/585-c89bb2471e85b9f8.js +1 -0
- package/out/_next/static/chunks/954-bedaaabf664584a0.js +1 -0
- package/out/_next/static/chunks/pages/{_app-e595114a0455ad49.js → _app-2566e110112a7719.js} +1 -1
- package/out/_next/static/chunks/pages/custom-types/{[customTypeId]-6d613b67e6967ae5.js → [customTypeId]-273e9a82c085b596.js} +1 -1
- package/out/_next/static/chunks/pages/page-types/{[pageTypeId]-40207b66190e3fcd.js → [pageTypeId]-3fa7667de1a790d9.js} +1 -1
- package/out/_next/static/chunks/pages/slices/[lib]/[sliceName]/[variation]/{simulator-faeb6d2f77d97096.js → simulator-8c70298caf51bed0.js} +1 -1
- package/out/_next/static/chunks/pages/slices-76679cf064761d2b.js +1 -0
- package/out/changelog.html +1 -1
- package/out/changes.html +1 -1
- package/out/custom-types/[customTypeId].html +1 -1
- package/out/custom-types.html +1 -1
- package/out/index.html +1 -1
- package/out/labs.html +1 -1
- package/out/page-types/[pageTypeId].html +1 -1
- package/out/slices/[lib]/[sliceName]/[variation]/simulator.html +1 -1
- package/out/slices/[lib]/[sliceName]/[variation].html +1 -1
- package/out/slices.html +1 -1
- package/package.json +4 -4
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/ImportSlicesFromLibraryModal.tsx +336 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/SliceCard.tsx +48 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/hooks/useImportSlicesFromGithub.ts +91 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/index.tsx +1 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/types.ts +28 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/addSlices.ts +186 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/github.ts +657 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/mapWithConcurrency.ts +28 -0
- package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/sliceWithoutConflicts.ts +51 -0
- package/src/features/customTypes/customTypesBuilder/SliceZoneBlankSlate.tsx +11 -0
- package/src/features/customTypes/customTypesBuilder/shared/getSubmitButtonLabel.ts +12 -0
- package/src/features/customTypes/customTypesBuilder/shared/useExistingSlices.ts +26 -0
- package/src/features/customTypes/customTypesBuilder/sliceCreationOptions.tsx +14 -0
- package/src/legacy/lib/builders/CustomTypeBuilder/SliceZone/index.tsx +51 -0
- package/src/pages/slices.tsx +30 -0
- package/out/_next/static/9LzZngwZPG59uAvDGA4B-/_buildManifest.js +0 -1
- package/out/_next/static/chunks/422-c9192a1dbdd2ae0e.js +0 -1
- package/out/_next/static/chunks/489-32281540712d98bb.js +0 -1
- package/out/_next/static/chunks/633-152bf078e5366a0c.js +0 -1
- package/out/_next/static/chunks/pages/slices-ec56b94b35794675.js +0 -1
- package/out/_next/static/{9LzZngwZPG59uAvDGA4B- → -rx3Dikh863L90KD25ytx}/_ssgManifest.js +0 -0
package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/github.ts
ADDED
@@ -0,0 +1,657 @@
import { SharedSliceContent } from "@prismicio/types-internal/lib/content";
import { SharedSlice } from "@prismicio/types-internal/lib/customtypes";
import { z } from "zod";

import { SliceFile, SliceImport } from "../types";
import { mapWithConcurrency } from "./mapWithConcurrency";

class GitHubRepositoryAPI {
  private readonly owner: string;
  private readonly repo: string;
  private readonly token?: string;
  private readonly baseUrl = "https://api.github.com";

  constructor(args: { owner: string; repo: string; token?: string }) {
    this.owner = args.owner;
    this.repo = args.repo;
    this.token = args.token;
  }

  private getHeaders(): HeadersInit {
    const headers: HeadersInit = {
      Accept: "application/vnd.github.v3+json",
    };
    if (this.token) {
      headers.Authorization = `Bearer ${this.token}`;
    }
    return headers;
  }

  private async request<T>(
    endpoint: string,
    options?: RequestInit,
  ): Promise<T> {
    const url = `${this.baseUrl}${endpoint}`;
    const response = await fetch(url, {
      ...options,
      headers: { ...this.getHeaders(), ...options?.headers },
    });

    if (!response.ok) {
      throw new Error(
        `GitHub API request failed: ${response.status} ${response.statusText}`,
      );
    }

    return response.json() as Promise<T>;
  }

  async getDefaultBranch() {
    const data = await this.request(`/repos/${this.owner}/${this.repo}`);
    return z.object({ default_branch: z.string() }).parse(data).default_branch;
  }

  async getFileContents(
    path: string,
    branch: string,
    isBinary = false,
  ): Promise<string | ArrayBuffer> {
    const data = await this.request(
      `/repos/${this.owner}/${this.repo}/contents/${path}?ref=${branch}`,
    );
    const parsed = z
      .object({ content: z.string(), encoding: z.string() })
      .parse(data);

    if (parsed.encoding !== "base64") {
      throw new Error(`Unexpected encoding for ${path}: ${parsed.encoding}`);
    }

    // Decode base64 content
    const base64Content = parsed.content.replace(/\s/g, "");
    const binaryString = atob(base64Content);

    if (isBinary) {
      // Convert to ArrayBuffer
      const bytes = new Uint8Array(binaryString.length);
      for (let i = 0; i < binaryString.length; i++) {
        bytes[i] = binaryString.charCodeAt(i);
      }
      return bytes.buffer;
    } else {
      // Return as string
      return binaryString;
    }
  }

  async getDirectoryContents(path: string) {
    const contents = await this.request(
      `/repos/${this.owner}/${this.repo}/contents/${path}`,
    );
    return z
      .array(z.object({ name: z.string(), type: z.string(), path: z.string() }))
      .parse(contents);
  }

  async searchCode(args: { path?: string; filename?: string }) {
    const { path, filename } = args;

    const query: string[] = [];
    if (path) query.push(`path:${path}`);
    if (filename) query.push(`filename:${filename}`);
    query.push(`repo:${this.owner}/${this.repo}`);

    const searchUrl = `/search/code?q=${encodeURIComponent(query.join(" "))}`;
    const data = await this.request(searchUrl);

    return z
      .object({
        items: z
          .array(z.object({ path: z.string(), name: z.string() }))
          .optional(),
        total_count: z.number().optional(),
      })
      .parse(data);
  }

  async getSliceLibraries(branch: string) {
    const data = await this.request(
      `/repos/${this.owner}/${this.repo}/contents/slicemachine.config.json?ref=${branch}`,
    );
    const parsed = z
      .object({
        content: z.string().optional(),
        encoding: z.string().optional(),
      })
      .parse(data);

    if (typeof parsed.content === "string") {
      // GitHub API returns base64-encoded content
      const decodedContent = atob(parsed.content.replace(/\s/g, ""));

      return z
        .object({ libraries: z.array(z.string()) })
        .parse(JSON.parse(decodedContent)).libraries;
    } else {
      throw new Error("No content found in slicemachine.config.json");
    }
  }
}

export const parseGithubUrl = (
  githubUrl: string,
): {
  owner: string;
  repo: string;
} => {
  const urlMatch = githubUrl.match(/github\.com\/([^\/]+)\/([^\/]+)(?:\/|$)/);
  if (!urlMatch) {
    throw new Error("Invalid GitHub URL format");
  }
  const [, owner, repoRaw] = urlMatch;
  const repo = repoRaw.replace(/\.git$/, "");
  return { owner, repo };
};

export const getDefaultBranch = async ({
  owner,
  repo,
}: {
  owner: string;
  repo: string;
}): Promise<string> => {
  const github = new GitHubRepositoryAPI({ owner, repo });
  return github.getDefaultBranch();
};

export const getSliceLibraries = async ({
  owner,
  repo,
  branch,
}: {
  owner: string;
  repo: string;
  branch: string;
}): Promise<string[]> => {
  const github = new GitHubRepositoryAPI({ owner, repo });
  return github.getSliceLibraries(branch);
};

export const fetchSlicesFromLibraries = async ({
  owner,
  repo,
  branch,
  libraries,
}: {
  owner: string;
  repo: string;
  branch: string;
  libraries: string[];
}) => {
  const github = new GitHubRepositoryAPI({ owner, repo });
  const fetchedSlices: SliceImport[] = [];

  console.log(
    `Fetching slices from ${libraries.length} library/libraries:`,
    libraries,
  );

  for (const libraryPath of libraries) {
    // Normalize library path (remove leading ./ if present)
    const normalizedPath = libraryPath.replace(/^\.\//, "");

    let sliceDirectories: Array<{
      name: string;
      path: string;
    }> = [];

    // Try GitHub API first
    let apiFailed = false;

    try {
      const libraryContents = await github.getDirectoryContents(normalizedPath);
      sliceDirectories = libraryContents
        .filter((item) => item.type === "dir")
        .map((item) => ({
          name: item.name,
          path: item.path,
        }));
    } catch (error) {
      apiFailed = true;
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      if (errorMessage.includes("403")) {
        console.warn(
          `GitHub API returned 403 for ${libraryPath}, trying direct discovery...`,
        );
      } else {
        console.warn(
          `GitHub API error for ${libraryPath}, trying direct discovery...`,
          errorMessage,
        );
      }
    }

    // If API failed, use GitHub Search API to find all model.json files in this library path
    if (apiFailed && sliceDirectories.length === 0) {
      console.log(
        `Attempting to discover slices using GitHub Search API for ${libraryPath}...`,
      );

      try {
        // Use GitHub Search API to find all model.json files in the library path
        const searchData = await github.searchCode({
          path: normalizedPath,
          filename: "model.json",
        });

        if (searchData.items && searchData.items.length > 0) {
          // Extract slice directory names from the paths
          // Path format: slices/marketing/slice-name/model.json
          const foundSlices = new Set<string>();
          for (const item of searchData.items) {
            // Extract the slice directory name from the path
            // e.g., "slices/marketing/hero/model.json" -> "hero"
            const pathParts = item.path.split("/");
            // The slice name should be the second-to-last part (before "model.json")
            if (pathParts.length >= 2) {
              const sliceName = pathParts[pathParts.length - 2];
              if (sliceName && !foundSlices.has(sliceName)) {
                foundSlices.add(sliceName);
              }
            }
          }

          // Convert to slice directories format
          sliceDirectories = Array.from(foundSlices).map((sliceName) => ({
            name: sliceName,
            path: `${normalizedPath}/${sliceName}`,
          }));

          console.log(
            `Discovered ${sliceDirectories.length} slice(s) via GitHub Search API for library ${libraryPath}`,
          );
        } else {
          console.warn(
            `GitHub Search API found no model.json files in ${libraryPath}`,
          );
        }
      } catch (error) {
        const errorMessage =
          error instanceof Error ? error.message : String(error);
        if (errorMessage.includes("403")) {
          console.warn(
            `GitHub Search API also returned 403. Cannot discover slices without API access.`,
          );
        } else {
          console.warn(
            `Error using GitHub Search API for ${libraryPath}:`,
            errorMessage,
          );
        }
      }
    }

    if (sliceDirectories.length === 0) {
      console.warn(
        `No slices found in library ${libraryPath}. The repository may be private or require authentication.`,
      );
      continue;
    }

    console.log(
      `Processing ${sliceDirectories.length} slice(s) in library ${libraryPath}`,
    );

    // Fetch each slice's model.json, screenshot, and all other files with bounded concurrency
    const perSlice = async (sliceDir: { name: string; path: string }) => {
      try {
        const modelContent = await github.getFileContents(
          `${sliceDir.path}/model.json`,
          branch,
          false,
        );

        if (typeof modelContent !== "string") {
          console.warn(
            `Failed to fetch model.json for slice: ${sliceDir.name} - unexpected content type`,
          );
          return;
        }

        const modelResult = SharedSlice.decode(JSON.parse(modelContent));
        if (modelResult._tag === "Left") {
          console.warn(
            `Failed to decode model.json for slice: ${sliceDir.name}`,
          );
          return;
        }
        const model = modelResult.right;

        // Fetch all files from the slice directory
        // Wrap in try-catch to prevent failures from blocking other slices
        let sliceFiles: SliceFile[] = [];
        try {
          sliceFiles = await fetchAllFilesFromDirectory({
            api: github,
            branch,
            directoryPath: sliceDir.path,
          });
          console.log(
            `Fetched ${sliceFiles.length} file(s) for slice ${sliceDir.name}:`,
            sliceFiles.map((f) => `${f.path}${f.isBinary ? " (binary)" : ""}`),
          );
        } catch (error) {
          console.warn(
            `Failed to fetch files for slice ${sliceDir.name}:`,
            error instanceof Error ? error.message : String(error),
          );
          // Continue with empty sliceFiles array
        }

        // Extract component contents and mocks
        let componentContents: string | undefined;
        let mocks: SharedSliceContent[] | undefined;

        for (const file of sliceFiles) {
          if (
            componentContents === undefined &&
            file.path.match(/^index\.(tsx?|jsx?|vue|svelte)$/)
          ) {
            if (typeof file.contents === "string") {
              componentContents = file.contents;
            }
          }

          if (file.path === "mocks.json" && typeof file.contents === "string") {
            try {
              const parsedMocksResult = SharedSliceContent.decode(
                JSON.parse(file.contents),
              );
              if (parsedMocksResult._tag === "Left") {
                console.warn(
                  `Failed to decode mocks.json for slice: ${sliceDir.name}`,
                );
              } else {
                const parsedMocks = parsedMocksResult.right;
                if (Array.isArray(parsedMocks) && parsedMocks.length > 0) {
                  mocks = parsedMocks;
                }
              }
            } catch {
              console.warn(
                `Failed to decode mocks.json for slice: ${sliceDir.name}`,
              );
            }
          }
        }

        // Fetch screenshots for all variations (usually few); keep unbounded or lightly bounded
        let thumbnailUrl: string | undefined;
        let screenshotFile: File | undefined;
        const screenshots: Record<string, File> = {};

        if (model.variations !== undefined && model.variations.length > 0) {
          const screenshotResults = await Promise.allSettled(
            model.variations.map(async (variation) => {
              try {
                const screenshotPath = `${sliceDir.path}/screenshot-${variation.id}.png`;
                const screenshotContent = await github.getFileContents(
                  screenshotPath,
                  branch,
                  true,
                );

                if (screenshotContent instanceof ArrayBuffer) {
                  const blob = new Blob([screenshotContent], {
                    type: "image/png",
                  });
                  const file = new File(
                    [blob],
                    `screenshot-${variation.id}.png`,
                    {
                      type: "image/png",
                    },
                  );
                  screenshots[variation.id] = file;

                  if (
                    thumbnailUrl === undefined &&
                    model.variations[0] !== undefined &&
                    variation.id === model.variations[0].id
                  ) {
                    thumbnailUrl = URL.createObjectURL(blob);
                    screenshotFile = file;
                  }
                }
              } catch (error) {
                // Screenshot might not exist for this variation, that's okay
                console.warn(
                  `Failed to fetch screenshot for variation ${variation.id}:`,
                  error instanceof Error ? error.message : String(error),
                );
              }
            }),
          );

          screenshotResults.forEach((result, index) => {
            if (result.status === "rejected") {
              console.warn(
                `Failed to fetch screenshot for variation ${model.variations[index]?.id}:`,
                result.reason instanceof Error
                  ? result.reason.message
                  : String(result.reason),
              );
            }
          });
        }

        const backupImageFile =
          screenshotFile ??
          new File([], `${model.name}.json`, {
            type: "application/json",
          });

        const sliceData: SliceImport = {
          image: backupImageFile,
          thumbnailUrl: thumbnailUrl ?? URL.createObjectURL(backupImageFile),
          model,
          files: sliceFiles,
          componentContents,
          mocks,
          screenshots:
            Object.keys(screenshots).length > 0 ? screenshots : undefined,
        };
        fetchedSlices.push(sliceData);
      } catch (error) {
        console.warn(
          `Error fetching slice ${sliceDir.name}:`,
          error instanceof Error ? error.message : String(error),
        );
      }
    };

    // Process slice directories with a concurrency cap to avoid API throttling
    await mapWithConcurrency(sliceDirectories, 6, perSlice);
  }

  return fetchedSlices;
};

/**
 * Recursively fetches all files from a GitHub directory
 */
const fetchAllFilesFromDirectory = async (args: {
  api: GitHubRepositoryAPI;
  branch: string;
  directoryPath: string;
}): Promise<SliceFile[]> => {
  const { api, branch, directoryPath } = args;
  const files: SliceFile[] = [];

  // Try GitHub API first
  let apiWorked = false;
  try {
    const contents = await api.getDirectoryContents(directoryPath);
    apiWorked = true;

    const fileItems = contents.filter((i) => i.type === "file");
    const dirItems = contents.filter((i) => i.type === "dir");

    // Process files with bounded concurrency
    const fileResults = await mapWithConcurrency(fileItems, 8, async (item) => {
      if (item.name === "model.json") return null;
      try {
        const binaryExtensions = [
          ".png",
          ".jpg",
          ".jpeg",
          ".gif",
          ".svg",
          ".ico",
          ".webp",
        ];
        const isBinaryFile = binaryExtensions.some((ext) =>
          item.name.toLowerCase().endsWith(ext),
        );

        const fileContents = await api.getFileContents(
          item.path,
          branch,
          isBinaryFile,
        );

        return {
          path: item.name,
          contents: fileContents,
          isBinary: isBinaryFile,
        } as SliceFile;
      } catch (error) {
        console.warn(
          `Failed to fetch file ${item.path}:`,
          error instanceof Error ? error.message : String(error),
        );
        return null;
      }
    });
    for (const r of fileResults) {
      if (r) files.push(r);
    }

    // Recursively process directories sequentially (counts are usually low)
    for (const item of dirItems) {
      const subFiles = await fetchAllFilesFromDirectory({
        api,
        branch,
        directoryPath: item.path,
      });
      for (const subFile of subFiles) {
        files.push({
          ...subFile,
          path: `${item.name}/${subFile.path}`,
        });
      }
    }
  } catch (error) {
    console.warn(
      `GitHub API failed for directory ${directoryPath}, trying Search API...`,
      error instanceof Error ? error.message : String(error),
    );
  }

  // If API failed, use GitHub Search API to find all files recursively
  if (!apiWorked) {
    try {
      console.log(
        `Using GitHub Search API to find all files in ${directoryPath}...`,
      );

      // Use GitHub Search API to find all files in this directory (recursively)
      const searchData = await api.searchCode({ path: directoryPath });

      if (searchData.items && searchData.items.length > 0) {
        console.log(
          `Found ${searchData.items.length} file(s) via Search API for ${directoryPath}`,
        );

        // Fetch all discovered files
        // Note: Search API returns up to 100 results per page, but we should get all files
        const fetched = await mapWithConcurrency(
          searchData.items,
          8,
          async (item) => {
            try {
              if (item.name === "model.json") return null;
              const relativePath = item.path.startsWith(directoryPath + "/")
                ? item.path.slice(directoryPath.length + 1)
                : item.name;

              const binaryExtensions = [
                ".png",
                ".jpg",
                ".jpeg",
                ".gif",
                ".svg",
                ".ico",
                ".webp",
                ".woff",
                ".woff2",
                ".ttf",
                ".eot",
                ".otf",
                ".pdf",
                ".zip",
                ".gz",
              ];
              const isBinaryFile = binaryExtensions.some((ext) =>
                item.name.toLowerCase().endsWith(ext),
              );

              const fileContents = await api.getFileContents(
                item.path,
                branch,
                isBinaryFile,
              );

              return {
                path: relativePath,
                contents: fileContents,
                isBinary: isBinaryFile,
              } as SliceFile | null;
            } catch (error) {
              console.warn(
                `Error fetching file ${item.path}:`,
                error instanceof Error ? error.message : String(error),
              );
              return null;
            }
          },
        );
        for (const item of fetched) {
          if (item) files.push(item);
        }

        console.log(
          `Fetched ${files.length} file(s) from ${directoryPath} via Search API`,
        );
      } else {
        console.warn(`GitHub Search API found no files in ${directoryPath}`);
      }
    } catch (error) {
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      if (errorMessage.includes("403")) {
        console.warn(
          `GitHub Search API returned 403 for ${directoryPath}. Cannot fetch files without API access.`,
        );
      } else {
        console.warn(
          `Error using GitHub Search API for ${directoryPath}:`,
          errorMessage,
        );
      }
    }
  }

  return files;
};
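Taken together, the exported helpers above describe one fetch flow: parse the repository URL, resolve the default branch, read the library list from slicemachine.config.json, then pull each library's slices. A minimal usage sketch of those exports; the wrapper function name and the repository URL are illustrative and not part of the package:

```ts
import {
  fetchSlicesFromLibraries,
  getDefaultBranch,
  getSliceLibraries,
  parseGithubUrl,
} from "./github";

// Hypothetical wrapper; the URL is a placeholder for any repo with a slicemachine.config.json.
async function importFromRepository(githubUrl: string) {
  const { owner, repo } = parseGithubUrl(githubUrl);
  const branch = await getDefaultBranch({ owner, repo });
  const libraries = await getSliceLibraries({ owner, repo, branch });
  return fetchSlicesFromLibraries({ owner, repo, branch, libraries });
}

void importFromRepository("https://github.com/example-org/example-site");
```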
package/src/features/customTypes/customTypesBuilder/ImportSlicesFromLibraryModal/utils/mapWithConcurrency.ts
ADDED
@@ -0,0 +1,28 @@
/**
 * Concurrency helper to control parallel network/IO without external deps
 * @param items - The items to map over
 * @param limit - The maximum number of concurrent operations
 * @param mapper - The function to map over the items
 * @returns The results of the mapped items
 */
export async function mapWithConcurrency<T, R>(
  items: readonly T[],
  limit: number,
  mapper: (item: T, index: number) => Promise<R>,
): Promise<R[]> {
  const results = new Array<R>(items.length);
  let nextIndex = 0;

  const workers = new Array(Math.min(limit, items.length))
    .fill(null)
    .map(async () => {
      while (true) {
        const currentIndex = nextIndex++;
        if (currentIndex >= items.length) break;
        results[currentIndex] = await mapper(items[currentIndex], currentIndex);
      }
    });

  await Promise.all(workers);
  return results;
}
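The doc comment above describes a small worker-pool pattern: at most `limit` workers repeatedly claim the next index from a shared counter until the list is exhausted, and results are written back by index so they preserve input order. A brief usage sketch; the URLs and the fetch calls are illustrative only:

```ts
import { mapWithConcurrency } from "./mapWithConcurrency";

// Illustrative: fetch three URLs with at most two requests in flight at a time.
async function fetchStatuses(): Promise<number[]> {
  const urls = [
    "https://example.com/a",
    "https://example.com/b",
    "https://example.com/c",
  ];
  return mapWithConcurrency(urls, 2, async (url) => {
    const response = await fetch(url);
    return response.status; // results come back in the same order as `urls`
  });
}
```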