@expofp/loader 1.0.65 → 1.0.78
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle/bundle.js +557 -431
- package/dist/bundle/bundle.js.map +1 -1
- package/dist/bundle/cssTextAssetResolver.offlineFunc-CWvHnYni.js +19 -0
- package/dist/bundle/cssTextAssetResolver.offlineFunc-CWvHnYni.js.map +1 -0
- package/dist/bundle/{downloadOfflineZip-B8tTyZYe.js → downloadOfflineZip-CNz_lUGZ.js} +701 -687
- package/dist/bundle/downloadOfflineZip-CNz_lUGZ.js.map +1 -0
- package/dist/bundle/legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js +87 -0
- package/dist/bundle/legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js.map +1 -0
- package/dist/bundle/makeOffline-Dj-0o5_7.js +76 -0
- package/dist/bundle/makeOffline-Dj-0o5_7.js.map +1 -0
- package/dist/bundle/makeOfflineBundle-D8tePWGI.js +70 -0
- package/dist/bundle/makeOfflineBundle-D8tePWGI.js.map +1 -0
- package/dist/bundle/tools-D0u8lBvQ.js +102 -0
- package/dist/bundle/tools-D0u8lBvQ.js.map +1 -0
- package/dist/esm/_OLD_fetchWithRetry.d.ts +1 -0
- package/dist/esm/_OLD_fetchWithRetry.js +101 -0
- package/dist/esm/index.d.ts +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/{loadAndWaitGlobal.d.ts → loadScript.d.ts} +2 -1
- package/dist/esm/{loadAndWaitGlobal.js → loadScript.js} +16 -6
- package/dist/esm/offline/generateZip.js +22 -2
- package/dist/esm/offline/index.d.ts +1 -0
- package/dist/esm/offline/index.js +4 -0
- package/dist/esm/offline/makeOffline.js +11 -3
- package/dist/esm/offline/makeOfflineBundle.js +26 -9
- package/dist/esm/offline/tools.d.ts +1 -0
- package/dist/esm/offline/tools.js +47 -31
- package/dist/esm/resolvers/assetResolver.d.ts +1 -1
- package/dist/esm/resolvers/assetResolver.js +1 -1
- package/dist/esm/resolvers/bundleAssetsResolver.js +1 -1
- package/dist/esm/resolvers/cssTextAssetResolver.d.ts +8 -0
- package/dist/esm/resolvers/cssTextAssetResolver.js +15 -0
- package/dist/esm/resolvers/cssTextAssetResolver.offlineFunc.d.ts +2 -0
- package/dist/esm/resolvers/cssTextAssetResolver.offlineFunc.js +22 -0
- package/dist/esm/resolvers/expoRuntimeBranchResolver.js +1 -1
- package/dist/esm/resolvers/expoRuntimeGetBranchResolver.js +1 -1
- package/dist/esm/resolvers/expoRuntimeResolver.js +1 -1
- package/dist/esm/resolvers/httpResolver.d.ts +1 -1
- package/dist/esm/resolvers/httpResolver.js +1 -1
- package/dist/esm/resolvers/index.js +4 -0
- package/dist/esm/resolvers/legacyAssetUrlsResolver.d.ts +4 -1
- package/dist/esm/resolvers/legacyAssetUrlsResolver.js +5 -2
- package/dist/esm/resolvers/legacyDataResolver.d.ts +4 -1
- package/dist/esm/resolvers/legacyDataResolver.js +4 -1
- package/dist/esm/resolvers/legacyDataUrlBaseResolver.d.ts +8 -0
- package/dist/esm/resolvers/legacyDataUrlBaseResolver.js +15 -0
- package/dist/esm/resolvers/legacyDataUrlBaseResolver.offlineFunc.d.ts +2 -0
- package/dist/esm/resolvers/legacyDataUrlBaseResolver.offlineFunc.js +129 -0
- package/dist/esm/shared.d.ts +1 -0
- package/dist/esm/shared.js +10 -2
- package/dist/esm/types.d.ts +7 -2
- package/package.json +4 -2
- package/dist/bundle/downloadOfflineZip-B8tTyZYe.js.map +0 -1
- package/dist/bundle/makeOffline-DH6wJEem.js +0 -158
- package/dist/bundle/makeOffline-DH6wJEem.js.map +0 -1
- package/dist/bundle/makeOfflineBundle-C-xleVMN.js +0 -58
- package/dist/bundle/makeOfflineBundle-C-xleVMN.js.map +0 -1
package/dist/bundle/legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js
ADDED
@@ -0,0 +1,87 @@
+ import { i as U, p as $, j as f, k as m } from "./bundle.js";
+ import { a as F, r as _ } from "./tools-D0u8lBvQ.js";
+ const D = async function* (a, i) {
+   U("legacyDataResolver offlineFunc:", a);
+   const n = i.signal, o = $(a), l = `${o}version.js`;
+   try {
+     await f(l, n);
+   } catch {
+     console.warn(`Could not load version.js at ${l}, proceeding without it`);
+   }
+   const e = globalThis.__fpDataVersion || (+/* @__PURE__ */ new Date()).toString(), t = F(o) + e + "/", r = `${o}data.js?v=${e}`;
+   await f(r, n);
+   const s = p("__data");
+   yield* v(s, o, t), yield { text: `var __data = ${JSON.stringify(s, null, 2)};`, targetFilePath: `${t}data.js` }, yield { url: `${o}wf.data.js?v=${e}`, targetFilePath: `${t}wf.data.js` };
+   const u = `${o}fp.svg.js?v=${e}`;
+   yield { url: u, targetFilePath: `${t}fp.svg.js` }, await f(u, n);
+   const y = (p("__fpLayers") || []).map((c) => `fp.svg.${c.name}.js`);
+   for (const c of y)
+     yield { url: `${o}${c}?v=${e}`, targetFilePath: `${t}${c}` };
+   return `${m}:${t}`;
+ };
+ function* v(a, i, n) {
+   if (a.logo) {
+     const l = o(a.logo);
+     yield l.file, a.logo = l.localUrl;
+   }
+   for (const l of a.exhibitors || []) {
+     if (l.logo) {
+       const t = o(l.logo);
+       yield t.file;
+       const r = ["__small", "__tiny"];
+       for (const s of r) {
+         const g = h(t.file.url, s), d = h(t.file.targetFilePath, s);
+         yield { url: g, targetFilePath: d };
+       }
+       l.logo = t.localUrl;
+     }
+     if (l.leadingImageUrl) {
+       const t = o(l.leadingImageUrl);
+       yield t.file, l.leadingImageUrl = t.localUrl;
+     }
+     const e = l.gallery || [];
+     for (let t = 0; t < e.length; t++) {
+       const r = e[t], s = o(r);
+       yield s.file, e[t] = s.localUrl;
+     }
+   }
+   for (const l of a.events || []) {
+     if (l.logoFile) {
+       const e = o(l.logoFile);
+       yield e.file, l.logoFile = e.localUrl;
+     }
+     for (const e of l.speakers || [])
+       if (e.photoFile) {
+         const t = o(e.photoFile);
+         yield t.file, e.photoFile = t.localUrl;
+       }
+   }
+   function o(l) {
+     const e = i + l, t = _(l), r = n + t;
+     return {
+       file: { url: e, targetFilePath: r },
+       localUrl: t
+     };
+   }
+ }
+ function p(a) {
+   const i = globalThis[a];
+   return delete globalThis[a], i;
+ }
+ function h(a, i) {
+   let n, o;
+   try {
+     n = new URL(a), o = !0;
+   } catch {
+     n = new URL(a, "http://example.com"), o = !1;
+   }
+   const l = n.pathname, e = l.lastIndexOf(".");
+   if (e === -1)
+     return a;
+   const t = l.substring(0, e), r = l.substring(e);
+   return n.pathname = t + i + r, o ? n.toString() : n.pathname.slice(1) + n.search;
+ }
+ export {
+   D as offlineFunc
+ };
+ //# sourceMappingURL=legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js.map
package/dist/bundle/legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"legacyDataUrlBaseResolver.offlineFunc-DPaSp_zV.js","sources":["../../src/resolvers/legacyDataUrlBaseResolver.offlineFunc.ts"],"sourcesContent":["import { loadScript } from '../loadScript';\nimport { makeTargetPathFromUrl, relativeUrlToFilePath } from '../offline/tools';\nimport { parseRefValue } from '../resolve';\nimport { LocalFetchFile, OfflineFunc } from '../types';\nimport { log, SCHEMA } from './legacyDataUrlBaseResolver';\n\nexport const offlineFunc: OfflineFunc = async function* (ref: string, context: any) {\n log('legacyDataResolver offlineFunc:', ref);\n const signal = context.signal;\n const dataUrlBase = parseRefValue(ref);\n const versionUrl = `${dataUrlBase}version.js`;\n try {\n await loadScript(versionUrl, signal);\n } catch {\n console.warn(`Could not load version.js at ${versionUrl}, proceeding without it`);\n }\n\n const version = (globalThis as any).__fpDataVersion || (+new Date()).toString();\n const localDataUrlBase = makeTargetPathFromUrl(dataUrlBase) + version + '/';\n\n const dataUrl = `${dataUrlBase}data.js?v=${version}`;\n\n // data.js\n await loadScript(dataUrl, signal);\n const __data = getAndDeleteGlobal('__data');\n yield* localizeDataJson(__data, dataUrlBase, localDataUrlBase);\n const text = `var __data = ${JSON.stringify(__data, null, 2)};`;\n yield { text, targetFilePath: `${localDataUrlBase}data.js` };\n\n // wf.data.js\n const wfDataUrl = `${dataUrlBase}wf.data.js?v=${version}`;\n yield { url: wfDataUrl, targetFilePath: `${localDataUrlBase}wf.data.js` };\n\n // fp.svg.js\n const fpSvgUrl = `${dataUrlBase}fp.svg.js?v=${version}`;\n yield { url: fpSvgUrl, targetFilePath: `${localDataUrlBase}fp.svg.js` };\n\n // load fp.svg.js to get __fpLayers\n await loadScript(fpSvgUrl, signal);\n const __fpLayers: { name: string }[] = getAndDeleteGlobal('__fpLayers') || [];\n const fpLayerFiles = __fpLayers.map((x) => `fp.svg.${x.name}.js`);\n for (const layerFile of fpLayerFiles) {\n // fp.svg.{layer}.js\n const layerUrl = `${dataUrlBase}${layerFile}?v=${version}`;\n yield { url: layerUrl, targetFilePath: `${localDataUrlBase}${layerFile}` };\n }\n\n return `${SCHEMA}:${localDataUrlBase}`;\n};\n\nfunction* localizeDataJson(\n data: any,\n dataUrlBase: string,\n localDataUrlBase: string\n): Generator<LocalFetchFile> {\n if (data.logo) {\n const r = z(data.logo);\n yield r.file;\n data.logo = r.localUrl;\n }\n for (const exhibitor of data.exhibitors || []) {\n if (exhibitor.logo) {\n const r = z(exhibitor.logo);\n yield r.file;\n\n const variants = ['__small', '__tiny'];\n for (const variant of variants) {\n const url = addSuffix(r.file.url, variant);\n const targetFilePath = addSuffix(r.file.targetFilePath, variant);\n yield { url, targetFilePath };\n }\n\n exhibitor.logo = r.localUrl;\n }\n if (exhibitor.leadingImageUrl) {\n const r = z(exhibitor.leadingImageUrl);\n yield r.file;\n exhibitor.leadingImageUrl = r.localUrl;\n }\n const gallery = exhibitor.gallery || [];\n for (let i = 0; i < gallery.length; i++) {\n const url = gallery[i];\n const r = z(url);\n yield r.file;\n gallery[i] = r.localUrl;\n }\n }\n\n for (const event of data.events || []) {\n if (event.logoFile) {\n const r = z(event.logoFile);\n yield r.file;\n event.logoFile = r.localUrl;\n }\n for (const speaker of event.speakers || []) {\n if (speaker.photoFile) {\n const r = z(speaker.photoFile);\n yield r.file;\n speaker.photoFile = r.localUrl;\n }\n }\n }\n\n function z(relativeUrl: string) {\n const url = dataUrlBase + relativeUrl;\n const localUrl = 
relativeUrlToFilePath(relativeUrl);\n const targetFilePath = localDataUrlBase + localUrl;\n return {\n file: { url, targetFilePath },\n localUrl,\n };\n }\n}\n\nfunction getAndDeleteGlobal(globalVar: string): any {\n const value = (globalThis as any)[globalVar];\n delete (globalThis as any)[globalVar];\n return value;\n}\n\n// abs and relative paths\nfunction addSuffix(absoluteOrRelativeUrl: string, suffix: string): string {\n let urlObj: URL;\n let absolute: boolean;\n try {\n urlObj = new URL(absoluteOrRelativeUrl);\n absolute = true;\n } catch {\n urlObj = new URL(absoluteOrRelativeUrl, 'http://example.com');\n absolute = false;\n }\n\n const pathname = urlObj.pathname;\n const dotIndex = pathname.lastIndexOf('.');\n if (dotIndex === -1) {\n return absoluteOrRelativeUrl; // no extension found\n }\n const name = pathname.substring(0, dotIndex);\n const extension = pathname.substring(dotIndex);\n urlObj.pathname = name + suffix + extension;\n if (absolute) {\n // return full absolute URL\n return urlObj.toString();\n } else {\n return urlObj.pathname.slice(1) + urlObj.search;\n }\n}\n"],"names":["offlineFunc","ref","context","log","signal","dataUrlBase","parseRefValue","versionUrl","loadScript","version","localDataUrlBase","makeTargetPathFromUrl","dataUrl","__data","getAndDeleteGlobal","localizeDataJson","fpSvgUrl","fpLayerFiles","x","layerFile","SCHEMA","data","r","z","exhibitor","variants","variant","url","addSuffix","targetFilePath","gallery","i","event","speaker","relativeUrl","localUrl","relativeUrlToFilePath","globalVar","value","absoluteOrRelativeUrl","suffix","urlObj","absolute","pathname","dotIndex","name","extension"],"mappings":";;AAMO,MAAMA,IAA2B,iBAAiBC,GAAaC,GAAc;AAClF,EAAAC,EAAI,mCAAmCF,CAAG;AAC1C,QAAMG,IAASF,EAAQ,QACjBG,IAAcC,EAAcL,CAAG,GAC/BM,IAAa,GAAGF,CAAW;AACjC,MAAI;AACF,UAAMG,EAAWD,GAAYH,CAAM;AAAA,EACrC,QAAQ;AACN,YAAQ,KAAK,gCAAgCG,CAAU,yBAAyB;AAAA,EAClF;AAEA,QAAME,IAAW,WAAmB,oBAAoB,CAAC,oBAAI,KAAA,GAAQ,SAAA,GAC/DC,IAAmBC,EAAsBN,CAAW,IAAII,IAAU,KAElEG,IAAU,GAAGP,CAAW,aAAaI,CAAO;AAGlD,QAAMD,EAAWI,GAASR,CAAM;AAChC,QAAMS,IAASC,EAAmB,QAAQ;AAC1C,SAAOC,EAAiBF,GAAQR,GAAaK,CAAgB,GAE7D,MAAM,EAAE,MADK,gBAAgB,KAAK,UAAUG,GAAQ,MAAM,CAAC,CAAC,KAC9C,gBAAgB,GAAGH,CAAgB,UAAA,GAIjD,MAAM,EAAE,KADU,GAAGL,CAAW,gBAAgBI,CAAO,IAC/B,gBAAgB,GAAGC,CAAgB,aAAA;AAG3D,QAAMM,IAAW,GAAGX,CAAW,eAAeI,CAAO;AACrD,QAAM,EAAE,KAAKO,GAAU,gBAAgB,GAAGN,CAAgB,YAAA,GAG1D,MAAMF,EAAWQ,GAAUZ,CAAM;AAEjC,QAAMa,KADiCH,EAAmB,YAAY,KAAK,CAAA,GAC3C,IAAI,CAACI,MAAM,UAAUA,EAAE,IAAI,KAAK;AAChE,aAAWC,KAAaF;AAGtB,UAAM,EAAE,KADS,GAAGZ,CAAW,GAAGc,CAAS,MAAMV,CAAO,IACjC,gBAAgB,GAAGC,CAAgB,GAAGS,CAAS,GAAA;AAGxE,SAAO,GAAGC,CAAM,IAAIV,CAAgB;AACtC;AAEA,UAAUK,EACRM,GACAhB,GACAK,GAC2B;AAC3B,MAAIW,EAAK,MAAM;AACb,UAAMC,IAAIC,EAAEF,EAAK,IAAI;AACrB,UAAMC,EAAE,MACRD,EAAK,OAAOC,EAAE;AAAA,EAChB;AACA,aAAWE,KAAaH,EAAK,cAAc,CAAA,GAAI;AAC7C,QAAIG,EAAU,MAAM;AAClB,YAAMF,IAAIC,EAAEC,EAAU,IAAI;AAC1B,YAAMF,EAAE;AAER,YAAMG,IAAW,CAAC,WAAW,QAAQ;AACrC,iBAAWC,KAAWD,GAAU;AAC9B,cAAME,IAAMC,EAAUN,EAAE,KAAK,KAAKI,CAAO,GACnCG,IAAiBD,EAAUN,EAAE,KAAK,gBAAgBI,CAAO;AAC/D,cAAM,EAAE,KAAAC,GAAK,gBAAAE,EAAA;AAAA,MACf;AAEA,MAAAL,EAAU,OAAOF,EAAE;AAAA,IACrB;AACA,QAAIE,EAAU,iBAAiB;AAC7B,YAAMF,IAAIC,EAAEC,EAAU,eAAe;AACrC,YAAMF,EAAE,MACRE,EAAU,kBAAkBF,EAAE;AAAA,IAChC;AACA,UAAMQ,IAAUN,EAAU,WAAW,CAAA;AACrC,aAASO,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMJ,IAAMG,EAAQC,CAAC,GACfT,IAAIC,EAAEI,CAAG;AACf,YAAML,EAAE,MACRQ,EAAQC,CAAC,IAAIT,EAAE;AAAA,IACjB;AAAA,EACF;AAEA,aAAWU,KAASX,EAAK,UAAU,CAAA,GAAI;AACrC,QAAIW,EAAM,UAAU;AAClB,YAAMV,IAAIC,EAAES,EAAM,QAAQ;AAC1B,YAAMV,EAAE,MACRU,EAAM,WA
AWV,EAAE;AAAA,IACrB;AACA,eAAWW,KAAWD,EAAM,YAAY,CAAA;AACtC,UAAIC,EAAQ,WAAW;AACrB,cAAMX,IAAIC,EAAEU,EAAQ,SAAS;AAC7B,cAAMX,EAAE,MACRW,EAAQ,YAAYX,EAAE;AAAA,MACxB;AAAA,EAEJ;AAEA,WAASC,EAAEW,GAAqB;AAC9B,UAAMP,IAAMtB,IAAc6B,GACpBC,IAAWC,EAAsBF,CAAW,GAC5CL,IAAiBnB,IAAmByB;AAC1C,WAAO;AAAA,MACL,MAAM,EAAE,KAAAR,GAAK,gBAAAE,EAAA;AAAA,MACb,UAAAM;AAAA,IAAA;AAAA,EAEJ;AACF;AAEA,SAASrB,EAAmBuB,GAAwB;AAClD,QAAMC,IAAS,WAAmBD,CAAS;AAC3C,gBAAQ,WAAmBA,CAAS,GAC7BC;AACT;AAGA,SAASV,EAAUW,GAA+BC,GAAwB;AACxE,MAAIC,GACAC;AACJ,MAAI;AACF,IAAAD,IAAS,IAAI,IAAIF,CAAqB,GACtCG,IAAW;AAAA,EACb,QAAQ;AACN,IAAAD,IAAS,IAAI,IAAIF,GAAuB,oBAAoB,GAC5DG,IAAW;AAAA,EACb;AAEA,QAAMC,IAAWF,EAAO,UAClBG,IAAWD,EAAS,YAAY,GAAG;AACzC,MAAIC,MAAa;AACf,WAAOL;AAET,QAAMM,IAAOF,EAAS,UAAU,GAAGC,CAAQ,GACrCE,IAAYH,EAAS,UAAUC,CAAQ;AAE7C,SADAH,EAAO,WAAWI,IAAOL,IAASM,GAC9BJ,IAEKD,EAAO,SAAA,IAEPA,EAAO,SAAS,MAAM,CAAC,IAAIA,EAAO;AAE7C;"}
package/dist/bundle/makeOffline-Dj-0o5_7.js
ADDED
@@ -0,0 +1,76 @@
+ import { d as m, r as b, b as O, e as R, s as F, f as g, h, p as d } from "./bundle.js";
+ import { m as j, a as P } from "./tools-D0u8lBvQ.js";
+ const w = m("efp:loader:makeOffline"), u = m("efp:loader:makeOffline:walk");
+ async function U(n) {
+   w("makeOffline", n);
+   const t = await c(n);
+   let f = !1, a;
+   async function* e() {
+     a = yield* t, f = !0;
+   }
+   return {
+     get manifest() {
+       if (!f) throw new Error("Iterate over files before getting manifest");
+       return a;
+     },
+     files: e()
+   };
+ }
+ async function* c(n) {
+   w("makeOfflineInternal", n);
+   const t = {
+     refCache: /* @__PURE__ */ new Map(),
+     forceFetch: !0,
+     signal: null
+   }, f = { manifest: n };
+   return yield* a(f, "manifest"), f.manifest;
+   async function* a(e, r) {
+     if (u(r, e[r]), !(typeof e[r] != "object" || e[r] === null)) {
+       if ("$ref" in e[r]) {
+         u("Found $ref:", e[r].$ref);
+         const l = b.filter((i) => O(i, e[r].$ref));
+         if (l.length === 0)
+           throw new Error(`No resolver found for ref: ${e[r].$ref}`);
+         if (l.length > 1)
+           throw new Error(`Multiple resolvers can make offline ref: ${e[r].$ref}`);
+         const o = l[0];
+         let s;
+         switch (o.offlineFunc) {
+           case "localizeRef":
+             s = x;
+             break;
+           case "resolveRef":
+             s = z;
+             break;
+           default:
+             const i = o.offlineFunc;
+             s = (C, v, $) => i(v, $);
+         }
+         const p = yield* s(o, e[r].$ref, t);
+         if (Object.isFrozen(e)) throw new Error("Unexpected frozen node during makeOffline");
+         e[r] = R({ $ref: p }, e[r]);
+       } else
+         e[r] = F(e[r]);
+       if (Array.isArray(e[r]))
+         for (const [l] of e[r].entries())
+           yield* a(e[r], l);
+       else if (typeof e[r] == "object" && e[r] !== null)
+         for (const l of Object.keys(e[r]))
+           yield* a(e[r], l);
+     }
+   }
+ }
+ async function* x(n, t, f) {
+   const a = g(await h(n, t, f));
+   yield* c(a);
+   const e = d(t), r = P(e);
+   return yield { url: e, targetFilePath: r }, `${t.substring(0, t.length - e.length - 1)}:${r}`;
+ }
+ async function* z(n, t, f) {
+   const a = g(await h(n, t, f)), e = j(t);
+   return yield { data: yield* c(a), targetFilePath: e }, e;
+ }
+ export {
+   U as makeOffline
+ };
+ //# sourceMappingURL=makeOffline-Dj-0o5_7.js.map
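The makeOffline result above pairs a files async generator with a manifest getter that throws until the generator has been drained. A minimal consumption sketch in TypeScript, assuming makeOffline is imported from the package root as the index.d.ts change further down re-exports it; collectOfflineFiles is a hypothetical helper name, not part of the package:

import { makeOffline } from '@expofp/loader';

// Hypothetical helper: drain result.files first, then read the rewritten manifest.
async function collectOfflineFiles(manifest: unknown) {
  const result = await makeOffline(manifest);
  const files: Array<{ targetFilePath: string }> = [];
  for await (const file of result.files) {
    // Each emitted item carries a targetFilePath plus one of: url, text, or data.
    files.push(file);
  }
  // Only safe after iteration; the getter throws "Iterate over files before getting manifest" otherwise.
  return { manifest: result.manifest, files };
}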
package/dist/bundle/makeOffline-Dj-0o5_7.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"makeOffline-Dj-0o5_7.js","sources":["../../src/offline/makeOffline.ts"],"sourcesContent":["import debug from 'debug';\nimport {\n canResolve,\n parseRefValue,\n resolverResolve,\n type ResolveContextInternal,\n} from '../resolve';\nimport { resolvers } from '../resolvers';\nimport { createMergedObjectWithOverridenNonRefProps, deepClone, shallowClone } from '../shared';\nimport type { LocalFile, MakeOfflineResult, ManifestData, Resolver } from '../types';\nimport { makeTargetPathFromUrl, makeUniqueJsonTargetPathFromString } from './tools';\n\nconst log = debug('efp:loader:makeOffline');\n\nconst logWalk = debug('efp:loader:makeOffline:walk');\n\nexport async function makeOffline(manifest: any): Promise<MakeOfflineResult> {\n log('makeOffline', manifest);\n // if (typeof manifest !== 'object' || manifest === null) {\n // throw new Error('Manifest must be an object');\n // }\n\n const res = await makeOfflineInternal(manifest);\n let done = false;\n let resultManifest: any;\n async function* files() {\n resultManifest = yield* res;\n done = true;\n }\n\n return {\n get manifest(): ManifestData {\n if (!done) throw new Error('Iterate over files before getting manifest');\n return resultManifest;\n },\n files: files() as AsyncGenerator<LocalFile, void, void>,\n };\n}\n\nasync function* makeOfflineInternal(manifest: any): AsyncGenerator<LocalFile, ManifestData, void> {\n log('makeOfflineInternal', manifest);\n const resolveContext: ResolveContextInternal = {\n refCache: new Map<string, Promise<any>>(),\n forceFetch: true,\n signal: null,\n };\n const parent = { manifest };\n yield* walk(parent, 'manifest');\n return parent.manifest;\n\n async function* walk(node: any, key: string | number): AsyncGenerator<LocalFile> {\n // console.log('walk', node, key, node[key]);\n logWalk(key, node[key]);\n // let node[key] = node[key];\n if (typeof node[key] !== 'object' || node[key] === null) {\n return;\n }\n // let resolversToUse1 = resolvers.filter((r) => r.canResolve(node[key].$ref));\n // const usedResolvers = new Set<Resolver>();\n // do {\n if ('$ref' in node[key]) {\n logWalk('Found $ref:', node[key].$ref);\n const resolversToUse = resolvers.filter((r) => canResolve(r, node[key].$ref));\n if (resolversToUse.length === 0) {\n throw new Error(`No resolver found for ref: ${node[key].$ref}`);\n }\n if (resolversToUse.length > 1) {\n throw new Error(`Multiple resolvers can make offline ref: ${node[key].$ref}`);\n }\n\n const resolver = resolversToUse[0];\n\n let func: typeof offlineLocalizeRef;\n switch (resolver.offlineFunc) {\n case 'localizeRef':\n func = offlineLocalizeRef;\n break;\n case 'resolveRef':\n func = offlineResolveRef;\n break;\n default:\n const m = resolver.offlineFunc;\n func = (_resolver, ref, context) => m(ref, context);\n // throw new Error(`Unknown offlineMethod: ${resolver.offlineMethod}`);\n }\n\n const mergeRef = yield* func(resolver, node[key].$ref, resolveContext);\n\n if (Object.isFrozen(node)) throw new Error('Unexpected frozen node during makeOffline');\n\n node[key] = createMergedObjectWithOverridenNonRefProps({ $ref: mergeRef }, node[key]);\n } else {\n node[key] = shallowClone(node[key]);\n }\n\n // recurse\n if (Array.isArray(node[key])) {\n for (const [index] of node[key].entries()) {\n yield* walk(node[key], index);\n }\n } else if (typeof node[key] === 'object' && node[key] !== null) {\n for (const key1 of Object.keys(node[key])) {\n // debugger;\n yield* walk(node[key], key1);\n }\n }\n }\n}\n\n// function canResolve(resolver: Resolver, ref: 
string): boolean {\n// if (resolver.canResolve) {\n// return resolver.canResolve(ref);\n// }\n// if (resolver.schema) {\n// return canResolveRefSchema(ref, resolver.schema);\n// }\n// throw new Error('Resolver is missing canResolve method and schema property');\n// }\n\nasync function* offlineLocalizeRef(\n resolver: Resolver,\n ref: string,\n context: ResolveContextInternal\n): AsyncGenerator<LocalFile, string, void> {\n const refData = deepClone(await resolverResolve(resolver, ref, context));\n // emit assets\n yield* makeOfflineInternal(refData);\n const url = parseRefValue(ref);\n const targetFilePath = makeTargetPathFromUrl(url);\n // TODO: handle CSS and other text files that may reference other assets\n yield { url, targetFilePath };\n const schema = ref.substring(0, ref.length - url.length - 1);\n return `${schema}:${targetFilePath}`;\n}\n\nasync function* offlineResolveRef(\n resolver: Resolver,\n ref: string,\n context: ResolveContextInternal\n): AsyncGenerator<LocalFile, string, void> {\n const refData = deepClone(await resolverResolve(resolver, ref, context));\n const targetFilePath = makeUniqueJsonTargetPathFromString(ref);\n const data = yield* makeOfflineInternal(refData);\n yield { data, targetFilePath };\n return targetFilePath;\n}\n\n// export function createMakeOfflineResult(\n// manifest: any,\n// walk: (node: any, key: string | number) => AsyncGenerator<LocalFile>\n// ): MakeOfflineResult {\n// log('createMakeOfflineResult', manifest);\n// if (typeof manifest !== 'object' || manifest === null) {\n// throw new Error('Manifest must be an object');\n// }\n// const parent = { manifest };\n// let done = false;\n// async function* files() {\n// yield* walk(parent, 'manifest');\n// done = true;\n// }\n\n// return {\n// get manifest() {\n// if (!done) {\n// throw new Error('Cannot access manifest before all files are generated');\n// }\n// return parent.manifest;\n// },\n// files: files(),\n// };\n// 
}\n"],"names":["log","debug","logWalk","makeOffline","manifest","res","makeOfflineInternal","done","resultManifest","files","resolveContext","parent","walk","node","key","resolversToUse","resolvers","r","canResolve","resolver","func","offlineLocalizeRef","offlineResolveRef","m","_resolver","ref","context","mergeRef","createMergedObjectWithOverridenNonRefProps","shallowClone","index","key1","refData","deepClone","resolverResolve","url","parseRefValue","targetFilePath","makeTargetPathFromUrl","makeUniqueJsonTargetPathFromString"],"mappings":";;AAYA,MAAMA,IAAMC,EAAM,wBAAwB,GAEpCC,IAAUD,EAAM,6BAA6B;AAEnD,eAAsBE,EAAYC,GAA2C;AAC3E,EAAAJ,EAAI,eAAeI,CAAQ;AAK3B,QAAMC,IAAM,MAAMC,EAAoBF,CAAQ;AAC9C,MAAIG,IAAO,IACPC;AACJ,kBAAgBC,IAAQ;AACtB,IAAAD,IAAiB,OAAOH,GACxBE,IAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,IAAI,WAAyB;AAC3B,UAAI,CAACA,EAAM,OAAM,IAAI,MAAM,4CAA4C;AACvE,aAAOC;AAAA,IACT;AAAA,IACA,OAAOC,EAAA;AAAA,EAAM;AAEjB;AAEA,gBAAgBH,EAAoBF,GAA8D;AAChG,EAAAJ,EAAI,uBAAuBI,CAAQ;AACnC,QAAMM,IAAyC;AAAA,IAC7C,8BAAc,IAAA;AAAA,IACd,YAAY;AAAA,IACZ,QAAQ;AAAA,EAAA,GAEJC,IAAS,EAAE,UAAAP,EAAA;AACjB,gBAAOQ,EAAKD,GAAQ,UAAU,GACvBA,EAAO;AAEd,kBAAgBC,EAAKC,GAAWC,GAAiD;AAI/E,QAFAZ,EAAQY,GAAKD,EAAKC,CAAG,CAAC,GAElB,SAAOD,EAAKC,CAAG,KAAM,YAAYD,EAAKC,CAAG,MAAM,OAMnD;AAAA,UAAI,UAAUD,EAAKC,CAAG,GAAG;AACvB,QAAAZ,EAAQ,eAAeW,EAAKC,CAAG,EAAE,IAAI;AACrC,cAAMC,IAAiBC,EAAU,OAAO,CAACC,MAAMC,EAAWD,GAAGJ,EAAKC,CAAG,EAAE,IAAI,CAAC;AAC5E,YAAIC,EAAe,WAAW;AAC5B,gBAAM,IAAI,MAAM,8BAA8BF,EAAKC,CAAG,EAAE,IAAI,EAAE;AAEhE,YAAIC,EAAe,SAAS;AAC1B,gBAAM,IAAI,MAAM,4CAA4CF,EAAKC,CAAG,EAAE,IAAI,EAAE;AAG9E,cAAMK,IAAWJ,EAAe,CAAC;AAEjC,YAAIK;AACJ,gBAAQD,EAAS,aAAA;AAAA,UACf,KAAK;AACH,YAAAC,IAAOC;AACP;AAAA,UACF,KAAK;AACH,YAAAD,IAAOE;AACP;AAAA,UACF;AACE,kBAAMC,IAAIJ,EAAS;AACnB,YAAAC,IAAO,CAACI,GAAWC,GAAKC,MAAYH,EAAEE,GAAKC,CAAO;AAAA,QAAA;AAItD,cAAMC,IAAW,OAAOP,EAAKD,GAAUN,EAAKC,CAAG,EAAE,MAAMJ,CAAc;AAErE,YAAI,OAAO,SAASG,CAAI,EAAG,OAAM,IAAI,MAAM,2CAA2C;AAEtF,QAAAA,EAAKC,CAAG,IAAIc,EAA2C,EAAE,MAAMD,EAAA,GAAYd,EAAKC,CAAG,CAAC;AAAA,MACtF;AACE,QAAAD,EAAKC,CAAG,IAAIe,EAAahB,EAAKC,CAAG,CAAC;AAIpC,UAAI,MAAM,QAAQD,EAAKC,CAAG,CAAC;AACzB,mBAAW,CAACgB,CAAK,KAAKjB,EAAKC,CAAG,EAAE;AAC9B,iBAAOF,EAAKC,EAAKC,CAAG,GAAGgB,CAAK;AAAA,eAErB,OAAOjB,EAAKC,CAAG,KAAM,YAAYD,EAAKC,CAAG,MAAM;AACxD,mBAAWiB,KAAQ,OAAO,KAAKlB,EAAKC,CAAG,CAAC;AAEtC,iBAAOF,EAAKC,EAAKC,CAAG,GAAGiB,CAAI;AAAA;AAAA,EAGjC;AACF;AAYA,gBAAgBV,EACdF,GACAM,GACAC,GACyC;AACzC,QAAMM,IAAUC,EAAU,MAAMC,EAAgBf,GAAUM,GAAKC,CAAO,CAAC;AAEvE,SAAOpB,EAAoB0B,CAAO;AAClC,QAAMG,IAAMC,EAAcX,CAAG,GACvBY,IAAiBC,EAAsBH,CAAG;AAEhD,eAAM,EAAE,KAAAA,GAAK,gBAAAE,EAAA,GAEN,GADQZ,EAAI,UAAU,GAAGA,EAAI,SAASU,EAAI,SAAS,CAAC,CAC3C,IAAIE,CAAc;AACpC;AAEA,gBAAgBf,EACdH,GACAM,GACAC,GACyC;AACzC,QAAMM,IAAUC,EAAU,MAAMC,EAAgBf,GAAUM,GAAKC,CAAO,CAAC,GACjEW,IAAiBE,EAAmCd,CAAG;AAE7D,eAAM,EAAE,MADK,OAAOnB,EAAoB0B,CAAO,GACjC,gBAAAK,EAAA,GACPA;AACT;"}
package/dist/bundle/makeOfflineBundle-D8tePWGI.js
ADDED
@@ -0,0 +1,70 @@
+ import { d, a as f, resolve as r } from "./bundle.js";
+ import { makeOffline as s } from "./makeOffline-Dj-0o5_7.js";
+ const o = d("efp:loader:makeOfflineBundle");
+ async function* p(t) {
+   o("Bundling", t), f(t);
+   const a = await s(t), n = /* @__PURE__ */ new Map();
+   for await (const e of a.files)
+     if ("data" in e) {
+       n.set(e.targetFilePath, e.data);
+       const i = JSON.stringify(e.data, null, 2);
+       o("Bundling file with data", e.targetFilePath, i), yield {
+         path: e.targetFilePath,
+         data: new TextEncoder().encode(i).buffer
+       };
+     } else if ("url" in e) {
+       const i = await u(e.url);
+       i ? (o("Bundling file from url", e.targetFilePath), yield {
+         path: e.targetFilePath,
+         data: await i.arrayBuffer()
+       }) : console.warn(`Skipping file ${e.url} as it could not be downloaded`);
+     } else if ("text" in e)
+       o("Bundling text file", e.targetFilePath), yield {
+         path: e.targetFilePath,
+         data: new TextEncoder().encode(e.text).buffer
+       };
+     else
+       throw new Error("Unknown file type");
+   if (await r(a.manifest, "/runtime", { preResolvedRefs: n })) {
+     const e = await w(a.manifest, n);
+     yield {
+       path: "./index.html",
+       data: new TextEncoder().encode(e).buffer
+     };
+   }
+ }
+ const c = 10, l = /* @__PURE__ */ new Set();
+ async function u(t) {
+   for (; l.size >= c; )
+     await Promise.race(l);
+   const a = (async () => {
+     o("Fetching file for zip:", t);
+     const n = await fetch(t);
+     if (!n.ok) {
+       t.includes("/data/fp.svg.ViewBox.js");
+       return;
+     }
+     return await n.blob();
+   })();
+   l.add(a);
+   try {
+     return await a;
+   } finally {
+     l.delete(a);
+   }
+ }
+ async function w(t, a) {
+   const n = await r(t, "/runtime/entry", { preResolvedRefs: a });
+   return `
+ <!DOCTYPE html>
+ <script type="module">
+   import { load } from ${JSON.stringify(n)};
+   await load(${JSON.stringify(t)});
+   console.info('🚀 loaded');
+ <\/script>
+ `;
+ }
+ export {
+   p as makeOfflineBundle
+ };
+ //# sourceMappingURL=makeOfflineBundle-D8tePWGI.js.map
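The download helper in this bundle (downloadFile in the original source) caps parallel fetches at 10 by keeping in-flight promises in a Set and awaiting Promise.race until the set shrinks below the limit. A generic TypeScript sketch of that same throttling pattern; the names here are illustrative, not part of the package API:

// Illustrative only: mirrors the queue used by downloadFile above.
const MAX_IN_FLIGHT = 10;
const inFlight = new Set<Promise<unknown>>();

async function throttled<T>(task: () => Promise<T>): Promise<T> {
  while (inFlight.size >= MAX_IN_FLIGHT) {
    await Promise.race(inFlight); // wait for any in-flight task to settle
  }
  const p = task();
  inFlight.add(p);
  try {
    return await p;
  } finally {
    inFlight.delete(p);
  }
}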
package/dist/bundle/makeOfflineBundle-D8tePWGI.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"makeOfflineBundle-D8tePWGI.js","sources":["../../src/offline/makeOfflineBundle.ts"],"sourcesContent":["import debug from 'debug';\nimport { resolve } from '..';\nimport { makeOffline } from './makeOffline';\nimport { deepFreeze } from '../shared';\n\nconst log = debug('efp:loader:makeOfflineBundle');\n\nexport async function* makeOfflineBundle(\n manifest: unknown\n): AsyncGenerator<{ path: string; data: ArrayBuffer }> {\n log('Bundling', manifest);\n deepFreeze(manifest);\n const offlineData = await makeOffline(manifest);\n const preResolvedRefs = new Map<string, any>();\n\n for await (const file of offlineData.files) {\n if ('data' in file) {\n preResolvedRefs.set(file.targetFilePath, file.data);\n const jsonString = JSON.stringify(file.data, null, 2);\n log('Bundling file with data', file.targetFilePath, jsonString);\n yield {\n path: file.targetFilePath,\n data: new TextEncoder().encode(jsonString).buffer,\n };\n } else if ('url' in file) {\n const blob = await downloadFile(file.url);\n if (!blob) {\n console.warn(`Skipping file ${file.url} as it could not be downloaded`);\n } else {\n log('Bundling file from url', file.targetFilePath);\n yield {\n path: file.targetFilePath,\n data: await blob.arrayBuffer(),\n };\n }\n } else if ('text' in file) {\n log('Bundling text file', file.targetFilePath);\n yield {\n path: file.targetFilePath,\n data: new TextEncoder().encode(file.text).buffer,\n };\n } else {\n throw new Error('Unknown file type');\n }\n }\n if (await resolve(offlineData.manifest, '/runtime', { preResolvedRefs })) {\n const html = await getIndexHtml(offlineData.manifest, preResolvedRefs);\n yield {\n path: './index.html',\n data: new TextEncoder().encode(html).buffer,\n };\n }\n}\n\nconst MAX_CONCURRENT_DOWNLOADS = 10;\nconst queue = new Set<Promise<Blob | void>>();\n// have a queue to limit concurrent downloads\nasync function downloadFile(url: string): Promise<Blob | void> {\n while (queue.size >= MAX_CONCURRENT_DOWNLOADS) {\n await Promise.race(queue);\n }\n const downloadPromise = (async () => {\n log('Fetching file for zip:', url);\n const response = await fetch(url);\n if (!response.ok) {\n // Rodion's bug. 
May be missing file.\n if (!url.includes('/data/fp.svg.ViewBox.js')) {\n // throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);\n }\n // console.warn(`Warning: Failed to fetch ${url}: ${response.status}`);\n return;\n }\n return await response.blob();\n })();\n queue.add(downloadPromise);\n try {\n return await downloadPromise;\n } finally {\n queue.delete(downloadPromise);\n }\n}\n\nasync function getIndexHtml(manifest: unknown, preResolvedRefs: Map<string, any>): Promise<string> {\n const entryPoint = await resolve(manifest, '/runtime/entry', { preResolvedRefs });\n const html = `\n<!DOCTYPE html>\n<script type=\"module\">\n import { load } from ${JSON.stringify(entryPoint)};\n await load(${JSON.stringify(manifest)});\n console.info('🚀 loaded');\n</script> \n`;\n return html;\n}\n"],"names":["log","debug","makeOfflineBundle","manifest","deepFreeze","offlineData","makeOffline","preResolvedRefs","file","jsonString","blob","downloadFile","resolve","html","getIndexHtml","MAX_CONCURRENT_DOWNLOADS","queue","url","downloadPromise","response","entryPoint"],"mappings":";;AAKA,MAAMA,IAAMC,EAAM,8BAA8B;AAEhD,gBAAuBC,EACrBC,GACqD;AACrD,EAAAH,EAAI,YAAYG,CAAQ,GACxBC,EAAWD,CAAQ;AACnB,QAAME,IAAc,MAAMC,EAAYH,CAAQ,GACxCI,wBAAsB,IAAA;AAE5B,mBAAiBC,KAAQH,EAAY;AACnC,QAAI,UAAUG,GAAM;AAClB,MAAAD,EAAgB,IAAIC,EAAK,gBAAgBA,EAAK,IAAI;AAClD,YAAMC,IAAa,KAAK,UAAUD,EAAK,MAAM,MAAM,CAAC;AACpD,MAAAR,EAAI,2BAA2BQ,EAAK,gBAAgBC,CAAU,GAC9D,MAAM;AAAA,QACJ,MAAMD,EAAK;AAAA,QACX,MAAM,IAAI,YAAA,EAAc,OAAOC,CAAU,EAAE;AAAA,MAAA;AAAA,IAE/C,WAAW,SAASD,GAAM;AACxB,YAAME,IAAO,MAAMC,EAAaH,EAAK,GAAG;AACxC,MAAKE,KAGHV,EAAI,0BAA0BQ,EAAK,cAAc,GACjD,MAAM;AAAA,QACJ,MAAMA,EAAK;AAAA,QACX,MAAM,MAAME,EAAK,YAAA;AAAA,MAAY,KAL/B,QAAQ,KAAK,iBAAiBF,EAAK,GAAG,gCAAgC;AAAA,IAQ1E,WAAW,UAAUA;AACnB,MAAAR,EAAI,sBAAsBQ,EAAK,cAAc,GAC7C,MAAM;AAAA,QACJ,MAAMA,EAAK;AAAA,QACX,MAAM,IAAI,YAAA,EAAc,OAAOA,EAAK,IAAI,EAAE;AAAA,MAAA;AAAA;AAG5C,YAAM,IAAI,MAAM,mBAAmB;AAGvC,MAAI,MAAMI,EAAQP,EAAY,UAAU,YAAY,EAAE,iBAAAE,EAAA,CAAiB,GAAG;AACxE,UAAMM,IAAO,MAAMC,EAAaT,EAAY,UAAUE,CAAe;AACrE,UAAM;AAAA,MACJ,MAAM;AAAA,MACN,MAAM,IAAI,YAAA,EAAc,OAAOM,CAAI,EAAE;AAAA,IAAA;AAAA,EAEzC;AACF;AAEA,MAAME,IAA2B,IAC3BC,wBAAY,IAAA;AAElB,eAAeL,EAAaM,GAAmC;AAC7D,SAAOD,EAAM,QAAQD;AACnB,UAAM,QAAQ,KAAKC,CAAK;AAE1B,QAAME,KAAmB,YAAY;AACnC,IAAAlB,EAAI,0BAA0BiB,CAAG;AACjC,UAAME,IAAW,MAAM,MAAMF,CAAG;AAChC,QAAI,CAACE,EAAS,IAAI;AAEhB,MAAKF,EAAI,SAAS,yBAAyB;AAI3C;AAAA,IACF;AACA,WAAO,MAAME,EAAS,KAAA;AAAA,EACxB,GAAA;AACA,EAAAH,EAAM,IAAIE,CAAe;AACzB,MAAI;AACF,WAAO,MAAMA;AAAA,EACf,UAAA;AACE,IAAAF,EAAM,OAAOE,CAAe;AAAA,EAC9B;AACF;AAEA,eAAeJ,EAAaX,GAAmBI,GAAoD;AACjG,QAAMa,IAAa,MAAMR,EAAQT,GAAU,kBAAkB,EAAE,iBAAAI,GAAiB;AAShF,SARa;AAAA;AAAA;AAAA,yBAGU,KAAK,UAAUa,CAAU,CAAC;AAAA,eACpC,KAAK,UAAUjB,CAAQ,CAAC;AAAA;AAAA;AAAA;AAKvC;"}
package/dist/bundle/tools-D0u8lBvQ.js
ADDED
@@ -0,0 +1,102 @@
+ import { d as $ } from "./bundle.js";
+ function r(t, n) {
+   let e = n >>> 0;
+   for (let o = 0; o < t.length; o++)
+     e ^= t.charCodeAt(o), e = Math.imul(e, 16777619);
+   return (e >>> 0).toString(16).padStart(8, "0");
+ }
+ function d(t) {
+   return r(t, 2166136261) + // your original seed
+   r(t, 569420461) + r(t, 461845907) + r(t, 2246822507);
+ }
+ const a = /* @__PURE__ */ new Map(), g = /* @__PURE__ */ new Map(), m = /* @__PURE__ */ new Set([
+   "con",
+   "prn",
+   "aux",
+   "nul",
+   "com1",
+   "com2",
+   "com3",
+   "com4",
+   "com5",
+   "com6",
+   "com7",
+   "com8",
+   "com9",
+   "lpt1",
+   "lpt2",
+   "lpt3",
+   "lpt4",
+   "lpt5",
+   "lpt6",
+   "lpt7",
+   "lpt8",
+   "lpt9"
+ ]);
+ function x(t) {
+   let n = t.normalize("NFKD").replace(/[^\p{Letter}\p{Number}]+/gu, "-").replace(/-+/g, "-").replace(/^-|-$/g, "").toLowerCase();
+   return n = n.replace(/[<>:"/\\|?*]/g, ""), n = n.replace(/[. ]+$/g, ""), n || (n = "file"), m.has(n) && (n += "-file"), n;
+ }
+ function c(t) {
+   const n = a.get(t);
+   if (n) return n;
+   const e = x(t);
+   let o = e, s = 2;
+   for (; ; ) {
+     const i = g.get(o);
+     if (!i)
+       return g.set(o, t), a.set(t, o), o;
+     if (i === t)
+       return a.set(t, o), o;
+     o = `${e}-${s++}`;
+   }
+ }
+ const f = $("efp:loader:offline");
+ function p(t, n = "") {
+   let e = c(t);
+   return n && (e = `${c(n)}$${e}`), e.endsWith("/") ? e += "index.json" : e.endsWith(".json") || (e += ".json"), "./" + e;
+ }
+ function w(t) {
+   const n = new URL(t, "http://example.com"), e = t.startsWith("/") ? n.pathname : n.pathname.slice(1), o = n.search;
+   return h(e, o);
+ }
+ const u = 120;
+ function h(t, n) {
+   if (!t.endsWith("/")) {
+     if (!t.match(/\.[^\/]+$/))
+       throw new Error(`Cannot make target path from URL without file extension: ${t}`);
+     const e = t.substring(t.lastIndexOf("."));
+     let o = t.substring(0, t.lastIndexOf("."));
+     const s = o.match(/[^a-zA-Z0-9\-._\/]/g);
+     if (s) {
+       const i = c(o);
+       f(
+         `Pathname contains invalid filesystem characters (${[...new Set(s)].join(
+           ", "
+         )}), slugifying it: ${o}${e} => ${i}${e}`
+       ), o = i;
+     }
+     if (t = o + e, t.length > u && (f(
+       `Pathname is too long (${t.length} characters), truncating to ${u} characters: ${t}`
+     ), t = t.substring(0, u - e.length) + e), n) {
+       const i = d(n), l = t.lastIndexOf(".");
+       l !== -1 ? t = `${t.slice(0, l)}.${i}${t.slice(l)}` : t = `${t}${i}`;
+     }
+   }
+   return t;
+ }
+ function b(t, n = "") {
+   try {
+     new URL(t);
+   } catch {
+     debugger;
+   }
+   const e = new URL(t), o = `${e.protocol}//${e.host}`, s = c(o), i = h(e.pathname, e.search);
+   return `./${n}${s}${i}`;
+ }
+ export {
+   b as a,
+   p as m,
+   w as r
+ };
+ //# sourceMappingURL=tools-D0u8lBvQ.js.map
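The tools module above turns URLs into filesystem-safe target paths: the origin is slugified, invalid characters are slugified away, names longer than 120 characters are truncated, and a query string is folded into the filename as a hash. The un-minified helpers also ship in dist/esm/offline/tools.js; a hedged sketch of the expected mapping, assuming that deep import path stays stable (it is not part of the documented public API):

import { makeTargetPathFromUrl, relativeUrlToFilePath } from '@expofp/loader/dist/esm/offline/tools';

// Per the comments in the original source:
//   https://example.com/dir1/dir2/a.js         => "./{origin-slug}/dir1/dir2/a.js"
//   https://example.com/dir1/dir2/a.js?params  => "./{origin-slug}/dir1/dir2/a.{search-hash}.js"
console.log(makeTargetPathFromUrl('https://example.com/dir1/dir2/a.js'));
console.log(relativeUrlToFilePath('images/logo.png')); // relative input keeps its relative shape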
package/dist/bundle/tools-D0u8lBvQ.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"tools-D0u8lBvQ.js","sources":["../../src/offline/hashString.ts","../../src/offline/slugify.ts","../../src/offline/tools.ts"],"sourcesContent":["// 32-bit murmur-ish hash with configurable seed\nfunction murmur32WithSeed(str: string, seed: number): string {\n let h = seed >>> 0;\n\n for (let i = 0; i < str.length; i++) {\n h ^= str.charCodeAt(i);\n h = Math.imul(h, 0x01000193); // FNV-ish prime, good mixer\n }\n\n return (h >>> 0).toString(16).padStart(8, '0'); // 8 hex chars\n}\n\n// 128-bit (MD5-width) hash: 4 × 32-bit parts concatenated\nexport function hashString(str: string): string {\n return (\n murmur32WithSeed(str, 0x811c9dc5) + // your original seed\n murmur32WithSeed(str, 0x21f0aaad) +\n murmur32WithSeed(str, 0x1b873593) +\n murmur32WithSeed(str, 0x85ebca6b)\n );\n}\n","// Module-level caches\nconst inputToSlug = new Map<string, string>();\nconst slugToInput = new Map<string, string>();\n\nconst WINDOWS_RESERVED = new Set([\n \"con\", \"prn\", \"aux\", \"nul\",\n \"com1\", \"com2\", \"com3\", \"com4\", \"com5\", \"com6\", \"com7\", \"com8\", \"com9\",\n \"lpt1\", \"lpt2\", \"lpt3\", \"lpt4\", \"lpt5\", \"lpt6\", \"lpt7\", \"lpt8\", \"lpt9\",\n]);\n\nfunction makeBaseSlug(input: string): string {\n let slug = input\n .normalize(\"NFKD\")\n // All non letters/numbers → \"-\"\n .replace(/[^\\p{Letter}\\p{Number}]+/gu, \"-\")\n // Collapse multiple \"-\"\n .replace(/-+/g, \"-\")\n // Trim \"-\" from start/end\n .replace(/^-|-$/g, \"\")\n .toLowerCase();\n\n // Strip forbidden Windows characters just in case\n slug = slug.replace(/[<>:\"/\\\\|?*]/g, \"\");\n\n // Windows forbids trailing space/period\n slug = slug.replace(/[. ]+$/g, \"\");\n\n // Fallback if everything was stripped\n if (!slug) slug = \"file\";\n\n // Avoid bare reserved device names\n if (WINDOWS_RESERVED.has(slug)) {\n slug += \"-file\";\n }\n\n return slug;\n}\n\nexport function slugifyFsUnique(input: string): string {\n // If we've seen this exact input before, return the same slug\n const existing = inputToSlug.get(input);\n if (existing) return existing;\n\n const base = makeBaseSlug(input);\n let candidate = base;\n let counter = 2;\n\n while (true) {\n const existingInput = slugToInput.get(candidate);\n\n if (!existingInput) {\n // Free slug → claim it for this input\n slugToInput.set(candidate, input);\n inputToSlug.set(input, candidate);\n return candidate;\n }\n\n if (existingInput === input) {\n // Same input somehow (super defensive)\n inputToSlug.set(input, candidate);\n return candidate;\n }\n\n // Collision: same slug already used by different input → add suffix\n candidate = `${base}-${counter++}`;\n }\n}\n\n// // Optional: to reset between runs/tests\n// export function resetSlugCache() {\n// inputToSlug.clear();\n// slugToInput.clear();\n// }","import debug from 'debug';\nimport { hashString } from './hashString';\nimport { slugifyFsUnique } from './slugify';\n\nconst log = debug('efp:loader:offline');\n\nexport function makeUniqueJsonTargetPathFromString(str: string, namespace: string = ''): string {\n // const hash = hashString(str);\n let result = slugifyFsUnique(str); // + '-' + hash;\n if (namespace) {\n result = `${slugifyFsUnique(namespace)}$${result}`;\n }\n\n if (result.endsWith('/')) {\n result += 'index.json';\n } else if (!result.endsWith('.json')) {\n result += '.json';\n }\n return './' + result;\n // handle directory case\n}\n\nexport function relativeUrlToFilePath(relativeUrl: string): string {\n const urlObj = new URL(relativeUrl, 'http://example.com');\n const 
pathname = relativeUrl.startsWith('/') ? urlObj.pathname : urlObj.pathname.slice(1);\n const search = urlObj.search;\n return pathnameSearchToPahthname(pathname, search);\n}\n\nconst MAX_PATHNAME_LENGTH = 120;\n\nfunction pathnameSearchToPahthname(pathname: string, search: string): string {\n // for directory\n if (pathname.endsWith('/')) {\n // just trust it for now\n // TODO: need to combine with the logic below\n } else {\n // if path doesn't end with extension, throw\n if (!pathname.match(/\\.[^\\/]+$/)) {\n throw new Error(`Cannot make target path from URL without file extension: ${pathname}`);\n }\n\n const extension = pathname.substring(pathname.lastIndexOf('.'));\n let pathnameWithoutExtension = pathname.substring(0, pathname.lastIndexOf('.'));\n\n // check the pathname contains only valid fs characters\n const invalidPathnameChars = pathnameWithoutExtension.match(/[^a-zA-Z0-9\\-._\\/]/g);\n if (invalidPathnameChars) {\n const fixedPathnameWithoutExtension = slugifyFsUnique(pathnameWithoutExtension);\n log(\n `Pathname contains invalid filesystem characters (${[...new Set(invalidPathnameChars)].join(\n ', '\n )}), slugifying it: ${pathnameWithoutExtension}${extension} => ${fixedPathnameWithoutExtension}${extension}`\n );\n pathnameWithoutExtension = fixedPathnameWithoutExtension;\n }\n\n pathname = pathnameWithoutExtension + extension;\n\n if (pathname.length > MAX_PATHNAME_LENGTH) {\n log(\n `Pathname is too long (${pathname.length} characters), truncating to ${MAX_PATHNAME_LENGTH} characters: ${pathname}`\n );\n pathname = pathname.substring(0, MAX_PATHNAME_LENGTH - extension.length) + extension;\n }\n\n if (search) {\n // create a hash from search params\n const hash = hashString(search);\n const dotIndex = pathname.lastIndexOf('.');\n if (dotIndex !== -1) {\n pathname = `${pathname.slice(0, dotIndex)}.${hash}${pathname.slice(dotIndex)}`;\n } else {\n pathname = `${pathname}${hash}`;\n }\n }\n }\n return pathname;\n}\n\nexport function makeTargetPathFromUrl(url: string, prefix: string = ''): string {\n // https://example.com/dir1/dir2/a.js => \"{prefix}{origin-slug}/dir1/dir2/a.js\";\n // https://example.com/dir1/dir2/a.js?params => \"{prefix}{origin-slug}/dir1/dir2/a{paramsmd5hash}.js\";\n // use slugify.ts\n\n try {\n new URL(url);\n } catch {\n debugger;\n }\n const urlObj = new URL(url);\n const origin = `${urlObj.protocol}//${urlObj.host}`;\n const originSlug = slugifyFsUnique(origin);\n\n const pathname = pathnameSearchToPahthname(urlObj.pathname, urlObj.search);\n\n // // handle directory case\n // if (pathname.endsWith('/')) {\n // pathname += '__index.json';\n // }\n\n return 
`./${prefix}${originSlug}${pathname}`;\n}\n"],"names":["murmur32WithSeed","str","seed","h","i","hashString","inputToSlug","slugToInput","WINDOWS_RESERVED","makeBaseSlug","input","slug","slugifyFsUnique","existing","base","candidate","counter","existingInput","log","debug","makeUniqueJsonTargetPathFromString","namespace","result","relativeUrlToFilePath","relativeUrl","urlObj","pathname","search","pathnameSearchToPahthname","MAX_PATHNAME_LENGTH","extension","pathnameWithoutExtension","invalidPathnameChars","fixedPathnameWithoutExtension","hash","dotIndex","makeTargetPathFromUrl","url","prefix","origin","originSlug"],"mappings":";AACA,SAASA,EAAiBC,GAAaC,GAAsB;AAC3D,MAAIC,IAAID,MAAS;AAEjB,WAASE,IAAI,GAAGA,IAAIH,EAAI,QAAQG;AAC9B,IAAAD,KAAKF,EAAI,WAAWG,CAAC,GACrBD,IAAI,KAAK,KAAKA,GAAG,QAAU;AAG7B,UAAQA,MAAM,GAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC/C;AAGO,SAASE,EAAWJ,GAAqB;AAC9C,SACED,EAAiBC,GAAK,UAAU;AAAA,EAChCD,EAAiBC,GAAK,SAAU,IAChCD,EAAiBC,GAAK,SAAU,IAChCD,EAAiBC,GAAK,UAAU;AAEpC;ACnBA,MAAMK,wBAAkB,IAAA,GAClBC,wBAAkB,IAAA,GAElBC,wBAAuB,IAAI;AAAA,EAC/B;AAAA,EAAO;AAAA,EAAO;AAAA,EAAO;AAAA,EACrB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAChE;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAClE,CAAC;AAED,SAASC,EAAaC,GAAuB;AAC3C,MAAIC,IAAOD,EACR,UAAU,MAAM,EAEhB,QAAQ,8BAA8B,GAAG,EAEzC,QAAQ,OAAO,GAAG,EAElB,QAAQ,UAAU,EAAE,EACpB,YAAA;AAGH,SAAAC,IAAOA,EAAK,QAAQ,iBAAiB,EAAE,GAGvCA,IAAOA,EAAK,QAAQ,WAAW,EAAE,GAG5BA,MAAMA,IAAO,SAGdH,EAAiB,IAAIG,CAAI,MAC3BA,KAAQ,UAGHA;AACT;AAEO,SAASC,EAAgBF,GAAuB;AAErD,QAAMG,IAAWP,EAAY,IAAII,CAAK;AACtC,MAAIG,EAAU,QAAOA;AAErB,QAAMC,IAAOL,EAAaC,CAAK;AAC/B,MAAIK,IAAYD,GACZE,IAAU;AAEd,aAAa;AACX,UAAMC,IAAgBV,EAAY,IAAIQ,CAAS;AAE/C,QAAI,CAACE;AAEH,aAAAV,EAAY,IAAIQ,GAAWL,CAAK,GAChCJ,EAAY,IAAII,GAAOK,CAAS,GACzBA;AAGT,QAAIE,MAAkBP;AAEpB,aAAAJ,EAAY,IAAII,GAAOK,CAAS,GACzBA;AAIT,IAAAA,IAAY,GAAGD,CAAI,IAAIE,GAAS;AAAA,EAClC;AACF;AC9DA,MAAME,IAAMC,EAAM,oBAAoB;AAE/B,SAASC,EAAmCnB,GAAaoB,IAAoB,IAAY;AAE9F,MAAIC,IAASV,EAAgBX,CAAG;AAChC,SAAIoB,MACFC,IAAS,GAAGV,EAAgBS,CAAS,CAAC,IAAIC,CAAM,KAG9CA,EAAO,SAAS,GAAG,IACrBA,KAAU,eACAA,EAAO,SAAS,OAAO,MACjCA,KAAU,UAEL,OAAOA;AAEhB;AAEO,SAASC,EAAsBC,GAA6B;AACjE,QAAMC,IAAS,IAAI,IAAID,GAAa,oBAAoB,GAClDE,IAAWF,EAAY,WAAW,GAAG,IAAIC,EAAO,WAAWA,EAAO,SAAS,MAAM,CAAC,GAClFE,IAASF,EAAO;AACtB,SAAOG,EAA0BF,GAAUC,CAAM;AACnD;AAEA,MAAME,IAAsB;AAE5B,SAASD,EAA0BF,GAAkBC,GAAwB;AAE3E,MAAI,CAAAD,EAAS,SAAS,GAAG,GAGlB;AAEL,QAAI,CAACA,EAAS,MAAM,WAAW;AAC7B,YAAM,IAAI,MAAM,4DAA4DA,CAAQ,EAAE;AAGxF,UAAMI,IAAYJ,EAAS,UAAUA,EAAS,YAAY,GAAG,CAAC;AAC9D,QAAIK,IAA2BL,EAAS,UAAU,GAAGA,EAAS,YAAY,GAAG,CAAC;AAG9E,UAAMM,IAAuBD,EAAyB,MAAM,qBAAqB;AACjF,QAAIC,GAAsB;AACxB,YAAMC,IAAgCrB,EAAgBmB,CAAwB;AAC9E,MAAAb;AAAA,QACE,oDAAoD,CAAC,GAAG,IAAI,IAAIc,CAAoB,CAAC,EAAE;AAAA,UACrF;AAAA,QAAA,CACD,qBAAqBD,CAAwB,GAAGD,CAAS,OAAOG,CAA6B,GAAGH,CAAS;AAAA,MAAA,GAE5GC,IAA2BE;AAAA,IAC7B;AAWA,QATAP,IAAWK,IAA2BD,GAElCJ,EAAS,SAASG,MACpBX;AAAA,MACE,yBAAyBQ,EAAS,MAAM,+BAA+BG,CAAmB,gBAAgBH,CAAQ;AAAA,IAAA,GAEpHA,IAAWA,EAAS,UAAU,GAAGG,IAAsBC,EAAU,MAAM,IAAIA,IAGzEH,GAAQ;AAEV,YAAMO,IAAO7B,EAAWsB,CAAM,GACxBQ,IAAWT,EAAS,YAAY,GAAG;AACzC,MAAIS,MAAa,KACfT,IAAW,GAAGA,EAAS,MAAM,GAAGS,CAAQ,CAAC,IAAID,CAAI,GAAGR,EAAS,MAAMS,CAAQ,CAAC,KAE5ET,IAAW,GAAGA,CAAQ,GAAGQ,CAAI;AAAA,IAEjC;AAAA,EACF;AACA,SAAOR;AACT;AAEO,SAASU,EAAsBC,GAAaC,IAAiB,IAAY;AAK9E,MAAI;AACF,QAAI,IAAID,CAAG;AAAA,EACb,QAAQ;AACN;AAAA,EACF;AACA,QAAMZ,IAAS,IAAI,IAAIY,CAAG,GACpBE,IAAS,GAAGd,EAAO,QAAQ,KAAKA,EAAO,IAAI,IAC3Ce,IAAa5B,EAAgB2B,CAAM,GAEnCb,IAAWE,EAA0BH,EAAO,UAAUA,EAAO,MAAM;AAOzE,SAAO,KAAKa,CAAM,GAAGE
,CAAU,GAAGd,CAAQ;AAC5C;"}
package/dist/esm/_OLD_fetchWithRetry.d.ts
ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/esm/_OLD_fetchWithRetry.js
ADDED
@@ -0,0 +1,101 @@
+ // import debug from 'debug';
+ export {};
+ // const log = debug('efp:loader:fetchWithRetry');
+ // function sleep(ms: number, signal?: AbortSignal | null) {
+ // if (signal?.aborted) return Promise.reject(new Error('Aborted'));
+ // return new Promise<void>((resolve, reject) => {
+ // const t = setTimeout(resolve, ms);
+ // signal?.addEventListener(
+ // 'abort',
+ // () => {
+ // clearTimeout(t);
+ // reject(new Error('Aborted'));
+ // },
+ // { once: true }
+ // );
+ // });
+ // }
+ // function isAbortError(err: any) {
+ // return err?.name === 'AbortError' || err?.message === 'Aborted';
+ // }
+ // function shouldRetryFetchError(err: any) {
+ // if (!err || isAbortError(err)) return false;
+ // // undici / node fetch often puts the real error in `cause`
+ // const e = err.cause ?? err;
+ // const code = e.code as string | undefined;
+ // // Common transient Node/undici network codes
+ // const transientCodes = new Set([
+ // 'ECONNRESET',
+ // 'ECONNREFUSED',
+ // 'EPIPE',
+ // 'ETIMEDOUT',
+ // 'ENETUNREACH',
+ // 'EAI_AGAIN',
+ // 'UND_ERR_CONNECT_TIMEOUT',
+ // 'UND_ERR_SOCKET',
+ // 'UND_ERR_HEADERS_TIMEOUT',
+ // 'UND_ERR_BODY_TIMEOUT',
+ // ]);
+ // // Your exact case often has no stable code; message match helps.
+ // const msg = String(e?.message ?? err?.message ?? '');
+ // const looksTransientByMessage =
+ // msg.includes('Client network socket disconnected') ||
+ // msg.includes('socket hang up') ||
+ // msg.includes('TLS') ||
+ // msg.includes('handshake');
+ // return (code && transientCodes.has(code)) || looksTransientByMessage;
+ // }
+ // function shouldRetryStatus(status: number) {
+ // return status === 408 || status === 429 || (status >= 500 && status <= 599);
+ // }
+ // export async function fetchWithRetry(
+ // url: string,
+ // init: RequestInit & { signal?: AbortSignal | null },
+ // { retries = 4, baseDelayMs = 200, maxDelayMs = 5_000, timeoutMsPerAttempt = 20_000 } = {}
+ // ): Promise<Response> {
+ // let attempt = 0;
+ // let lastErr: any;
+ // while (attempt <= retries) {
+ // if (init.signal?.aborted) throw new Error('Script load aborted');
+ // // Per-attempt timeout + outer signal
+ // const ac = new AbortController();
+ // const timeout = setTimeout(() => ac.abort(), timeoutMsPerAttempt);
+ // // If caller aborts, abort this attempt too
+ // const onAbort = () => ac.abort();
+ // init.signal?.addEventListener('abort', onAbort, { once: true });
+ // try {
+ // const res = await fetch(url, { ...init, signal: ac.signal });
+ // if (!res.ok && shouldRetryStatus(res.status) && attempt < retries) {
+ // // consume/close body to avoid resource leaks before retrying
+ // try {
+ // await res.arrayBuffer();
+ // } catch {}
+ // const jitter = 0.6 + Math.random() * 0.8; // 0.6..1.4
+ // const delay = Math.min(maxDelayMs, baseDelayMs * 2 ** attempt) * jitter;
+ // await sleep(delay, init.signal);
+ // attempt++;
+ // log('Retrying fetch due to status', { url, attempt, status: res.status });
+ // continue;
+ // }
+ // return res;
+ // } catch (err: any) {
+ // if (isAbortError(err) || init.signal?.aborted) {
+ // throw new Error('Script load aborted');
+ // }
+ // lastErr = err;
+ // if (attempt >= retries || !shouldRetryFetchError(err)) {
+ // throw err;
+ // }
+ // const jitter = 0.6 + Math.random() * 0.8;
+ // const delay = Math.min(maxDelayMs, baseDelayMs * 2 ** attempt) * jitter;
+ // await sleep(delay, init.signal);
+ // attempt++;
+ // log('Retrying fetch', { url, attempt, err });
+ // continue;
+ // } finally {
+ // clearTimeout(timeout);
+ // init.signal?.removeEventListener?.('abort', onAbort as any);
+ // }
+ // }
+ // throw lastErr ?? new Error('fetch failed');
+ // }
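This retained but fully commented-out module is the hand-rolled retry helper the loader no longer uses; the loadScript.js change further down swaps it for the fetch-retry package via fetchRetry(fetch). A sketch of configuring that wrapper, with option names taken from fetch-retry's documentation rather than from this diff:

import fetchRetry from 'fetch-retry';

// Defaults apply when no options are passed, which is how loadScript.js calls it below.
const fetchWithRetry = fetchRetry(fetch);

// Options shown here are fetch-retry's documented knobs, not settings this package sets.
const res = await fetchWithRetry('https://example.com/version.js', {
  retries: 3,
  retryDelay: 500,
  retryOn: [408, 429, 500, 502, 503, 504],
});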
package/dist/esm/index.d.ts
CHANGED
@@ -3,5 +3,5 @@ export declare const load: (manifest: unknown, ...args: IArguments[]) => Promise
  export declare const initialize: (manifest: unknown, ...args: IArguments[]) => Promise<any>;
  export declare function callFunction_Experimental(name: string, manifest: unknown, ...args: IArguments[]): Promise<any>;
  export { mutateManifest } from './mutateManifest';
- export { downloadOfflineZip, makeOfflineBundle, saveOfflineZip } from './offline';
+ export { downloadOfflineZip, makeOfflineBundle, saveOfflineZip, makeOffline } from './offline';
  export { resolve } from './resolve';
package/dist/esm/index.js
CHANGED
@@ -18,7 +18,7 @@ export async function callFunction_Experimental(name, manifest, ...args) {
      return await fn(manifest, ...args);
  }
  export { mutateManifest } from './mutateManifest';
- export { downloadOfflineZip, makeOfflineBundle, saveOfflineZip } from './offline';
+ export { downloadOfflineZip, makeOfflineBundle, saveOfflineZip, makeOffline } from './offline';
  export { resolve } from './resolve';
  // preloadJson('https://efp-runtime.expofp.com/branches/main.json');
  // preconnectUrl('https://efp-runtime.expofp.com/');
package/dist/esm/{loadAndWaitGlobal.d.ts → loadScript.d.ts}
CHANGED
@@ -8,8 +8,9 @@ export interface LoadAndWaitOptions {
      signal: AbortSignal | null;
  }
  /**
+  * @deprecated Use loadScript directly instead.
   * Load a script from `scriptUrl` and optionally wait until `globalVar` appears.
   * Works in both browser (via <script>) and Node 18+ (via fetch + eval).
   */
  export declare function loadAndWaitGlobal(scriptUrl: string, options: LoadAndWaitOptions): Promise<void>;
- export declare function
+ export declare function loadScript(scriptUrl: string, signal: AbortSignal | null): Promise<void>;
package/dist/esm/{loadAndWaitGlobal.js → loadScript.js}
CHANGED
@@ -1,5 +1,9 @@
-
+ import debug from 'debug';
+ import fetchRetry from 'fetch-retry';
+ const fetchWithRetry = fetchRetry(fetch);
+ const log = debug('efp:loader:loadScript');
  /**
+  * @deprecated Use loadScript directly instead.
   * Load a script from `scriptUrl` and optionally wait until `globalVar` appears.
   * Works in both browser (via <script>) and Node 18+ (via fetch + eval).
   */
@@ -27,8 +31,17 @@ export async function loadAndWaitGlobal(scriptUrl, options) {
  function isBrowser() {
      return typeof window !== 'undefined' && typeof document !== 'undefined';
  }
+ export async function loadScript(scriptUrl, signal) {
+     log('Loading', scriptUrl);
+     if (isBrowser()) {
+         await loadInBrowser(scriptUrl, signal);
+     }
+     else {
+         await loadInNode(scriptUrl, signal);
+     }
+ }
  /* -------------------- Browser implementation -------------------- */
-
+ function loadInBrowser(scriptUrl, signal) {
      return new Promise((resolve, reject) => {
          const script = document.createElement('script');
          script.src = scriptUrl;
@@ -63,15 +76,12 @@ export function loadInBrowser(scriptUrl, signal) {
  }
  /* -------------------- Node implementation -------------------- */
  async function loadInNode(scriptUrl, signal) {
-     if (typeof fetch !== 'function') {
-         throw new Error('loadInNode: fetch is not available. Use Node 18+ or provide a global fetch.');
-     }
      if (signal?.aborted) {
          throw new Error('Script load aborted');
      }
      let res;
      try {
-         res = await
+         res = await fetchWithRetry(scriptUrl, { signal });
      }
      catch (err) {
          // Node / WHATWG fetch uses AbortError on abort
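The renamed module now exposes loadScript(scriptUrl, signal) alongside the deprecated loadAndWaitGlobal. A small usage sketch with an AbortController; the deep import path is an assumption, since the package root index shown earlier does not re-export loadScript:

import { loadScript } from '@expofp/loader/dist/esm/loadScript';

const controller = new AbortController();
// Abort if the script has not loaded within 10 seconds (illustrative timeout).
const timer = setTimeout(() => controller.abort(), 10_000);
try {
  await loadScript('https://example.com/fp.svg.js', controller.signal);
} finally {
  clearTimeout(timer);
}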
package/dist/esm/offline/generateZip.js
CHANGED
@@ -2,16 +2,36 @@ import debug from 'debug';
  import JSZip from 'jszip';
  import { makeOfflineBundle } from './makeOfflineBundle';
  const log = debug('efp:loader:generateZip');
+ const MAX_CONCURRENT_DOWNLOADS = 10;
+ async function processConcurrently(iterator, concurrency, processor) {
+     const activePromises = new Set();
+     for await (const item of iterator) {
+         const promise = Promise.resolve(processor(item)).then(() => {
+             activePromises.delete(promise);
+         });
+         activePromises.add(promise);
+         if (activePromises.size >= concurrency) {
+             await Promise.race(activePromises);
+         }
+     }
+     await Promise.all(activePromises);
+ }
  export async function generateZip(manifest, extraFiles) {
      const zip = new JSZip();
      const bundle = await makeOfflineBundle(manifest);
-
+     let hasFiles = false;
+     await processConcurrently(bundle, MAX_CONCURRENT_DOWNLOADS, (file) => {
          log('Adding file to zip:', file.path, 'size:', file.data.byteLength);
          zip.file(file.path, file.data);
-
+         hasFiles = true;
+     });
      for (const extraFile of extraFiles ?? []) {
          log('Adding extra file to zip:', extraFile.path, 'size:', extraFile.data.byteLength);
          zip.file(extraFile.path, extraFile.data);
+         hasFiles = true;
+     }
+     if (!hasFiles) {
+         throw new Error('No files were added to the ZIP archive');
      }
      // Generate archive
      const blob = await zip.generateAsync({ type: 'blob' });