@expofp/loader 1.0.54 → 1.0.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{index.js → bundle/bundle.js} +34 -32
- package/dist/bundle/bundle.js.map +1 -0
- package/dist/{downloadZip-D1Leqczj.js → bundle/downloadZip-D2QfL_V_.js} +2 -2
- package/dist/bundle/downloadZip-D2QfL_V_.js.map +1 -0
- package/dist/{makeOffline-DdJq2uGI.js → bundle/makeOffline-CBs7cwrM.js} +2 -2
- package/dist/bundle/makeOffline-CBs7cwrM.js.map +1 -0
- package/dist/{makeOfflineBundle-CdE5LdlF.js → bundle/makeOfflineBundle-BpFIeDmK.js} +3 -3
- package/dist/bundle/makeOfflineBundle-BpFIeDmK.js.map +1 -0
- package/dist/{importJson.d.ts → esm/importJson.d.ts} +1 -1
- package/dist/esm/importJson.js +52 -0
- package/dist/esm/index.js +16 -0
- package/dist/esm/loadAndWaitGlobal.js +157 -0
- package/dist/{mutateManifest.d.ts → esm/mutateManifest.d.ts} +1 -1
- package/dist/esm/mutateManifest.js +10 -0
- package/dist/esm/offline/downloadZip copy.js +89 -0
- package/dist/esm/offline/downloadZip.js +18 -0
- package/dist/esm/offline/hashString.js +16 -0
- package/dist/esm/offline/index.js +29 -0
- package/dist/{offline → esm/offline}/makeOffline.d.ts +1 -1
- package/dist/esm/offline/makeOffline.js +134 -0
- package/dist/esm/offline/makeOfflineBundle copy.js +92 -0
- package/dist/esm/offline/makeOfflineBundle.js +64 -0
- package/dist/esm/offline/slugify.js +61 -0
- package/dist/esm/offline/tools.js +69 -0
- package/dist/{resolve.d.ts → esm/resolve.d.ts} +1 -1
- package/dist/esm/resolve.js +331 -0
- package/dist/esm/resolvers/_OLD_expoResolver.js +49 -0
- package/dist/esm/resolvers/assetResolver.js +26 -0
- package/dist/{resolvers → esm/resolvers}/bundleAssetsResolver.d.ts +1 -1
- package/dist/esm/resolvers/bundleAssetsResolver.js +20 -0
- package/dist/{resolvers → esm/resolvers}/expoRuntimeBranchResolver.d.ts +1 -1
- package/dist/esm/resolvers/expoRuntimeBranchResolver.js +20 -0
- package/dist/{resolvers → esm/resolvers}/expoRuntimeGetBranchResolver.d.ts +1 -1
- package/dist/esm/resolvers/expoRuntimeGetBranchResolver.js +14 -0
- package/dist/{resolvers → esm/resolvers}/expoRuntimeResolver.d.ts +1 -1
- package/dist/esm/resolvers/expoRuntimeResolver.js +38 -0
- package/dist/{resolvers → esm/resolvers}/httpResolver.d.ts +1 -1
- package/dist/esm/resolvers/httpResolver.js +14 -0
- package/dist/{resolvers → esm/resolvers}/index.d.ts +1 -1
- package/dist/esm/resolvers/index.js +18 -0
- package/dist/{resolvers → esm/resolvers}/legacyAssetUrlsResolver.d.ts +1 -1
- package/dist/esm/resolvers/legacyAssetUrlsResolver.js +103 -0
- package/dist/{resolvers → esm/resolvers}/legacyDataResolver.d.ts +1 -1
- package/dist/esm/resolvers/legacyDataResolver.js +17 -0
- package/dist/esm/returnCachedRef.js +12 -0
- package/dist/esm/shared.js +270 -0
- package/dist/{types.d.ts → esm/types.d.ts} +1 -1
- package/dist/esm/types.js +1 -0
- package/package.json +14 -11
- package/dist/downloadZip-D1Leqczj.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/makeOffline-DdJq2uGI.js.map +0 -1
- package/dist/makeOfflineBundle-CdE5LdlF.js.map +0 -1
- /package/dist/{index.d.ts → esm/index.d.ts} +0 -0
- /package/dist/{loadAndWaitGlobal.d.ts → esm/loadAndWaitGlobal.d.ts} +0 -0
- /package/dist/{offline → esm/offline}/downloadZip copy.d.ts +0 -0
- /package/dist/{offline → esm/offline}/downloadZip.d.ts +0 -0
- /package/dist/{offline → esm/offline}/hashString.d.ts +0 -0
- /package/dist/{offline → esm/offline}/index.d.ts +0 -0
- /package/dist/{offline → esm/offline}/makeOfflineBundle copy.d.ts +0 -0
- /package/dist/{offline → esm/offline}/makeOfflineBundle.d.ts +0 -0
- /package/dist/{offline → esm/offline}/slugify.d.ts +0 -0
- /package/dist/{offline → esm/offline}/tools.d.ts +0 -0
- /package/dist/{resolvers → esm/resolvers}/_OLD_expoResolver.d.ts +0 -0
- /package/dist/{resolvers → esm/resolvers}/assetResolver.d.ts +0 -0
- /package/dist/{returnCachedRef.d.ts → esm/returnCachedRef.d.ts} +0 -0
- /package/dist/{shared.d.ts → esm/shared.d.ts} +0 -0

package/dist/bundle/makeOffline-CBs7cwrM.js.map
@@ -0,0 +1 @@
{"version":3,"file":"makeOffline-CBs7cwrM.js","sources":["../../src/offline/hashString.ts","../../src/offline/slugify.ts","../../src/offline/tools.ts","../../src/offline/makeOffline.ts"],"sourcesContent":["// 32-bit murmur-ish hash with configurable seed\nfunction murmur32WithSeed(str: string, seed: number): string {\n let h = seed >>> 0;\n\n for (let i = 0; i < str.length; i++) {\n h ^= str.charCodeAt(i);\n h = Math.imul(h, 0x01000193); // FNV-ish prime, good mixer\n }\n\n return (h >>> 0).toString(16).padStart(8, '0'); // 8 hex chars\n}\n\n// 128-bit (MD5-width) hash: 4 × 32-bit parts concatenated\nexport function hashString(str: string): string {\n return (\n murmur32WithSeed(str, 0x811c9dc5) + // your original seed\n murmur32WithSeed(str, 0x21f0aaad) +\n murmur32WithSeed(str, 0x1b873593) +\n murmur32WithSeed(str, 0x85ebca6b)\n );\n}\n","// Module-level caches\nconst inputToSlug = new Map<string, string>();\nconst slugToInput = new Map<string, string>();\n\nconst WINDOWS_RESERVED = new Set([\n \"con\", \"prn\", \"aux\", \"nul\",\n \"com1\", \"com2\", \"com3\", \"com4\", \"com5\", \"com6\", \"com7\", \"com8\", \"com9\",\n \"lpt1\", \"lpt2\", \"lpt3\", \"lpt4\", \"lpt5\", \"lpt6\", \"lpt7\", \"lpt8\", \"lpt9\",\n]);\n\nfunction makeBaseSlug(input: string): string {\n let slug = input\n .normalize(\"NFKD\")\n // All non letters/numbers → \"-\"\n .replace(/[^\\p{Letter}\\p{Number}]+/gu, \"-\")\n // Collapse multiple \"-\"\n .replace(/-+/g, \"-\")\n // Trim \"-\" from start/end\n .replace(/^-|-$/g, \"\")\n .toLowerCase();\n\n // Strip forbidden Windows characters just in case\n slug = slug.replace(/[<>:\"/\\\\|?*]/g, \"\");\n\n // Windows forbids trailing space/period\n slug = slug.replace(/[. ]+$/g, \"\");\n\n // Fallback if everything was stripped\n if (!slug) slug = \"file\";\n\n // Avoid bare reserved device names\n if (WINDOWS_RESERVED.has(slug)) {\n slug += \"-file\";\n }\n\n return slug;\n}\n\nexport function slugifyFsUnique(input: string): string {\n // If we've seen this exact input before, return the same slug\n const existing = inputToSlug.get(input);\n if (existing) return existing;\n\n const base = makeBaseSlug(input);\n let candidate = base;\n let counter = 2;\n\n while (true) {\n const existingInput = slugToInput.get(candidate);\n\n if (!existingInput) {\n // Free slug → claim it for this input\n slugToInput.set(candidate, input);\n inputToSlug.set(input, candidate);\n return candidate;\n }\n\n if (existingInput === input) {\n // Same input somehow (super defensive)\n inputToSlug.set(input, candidate);\n return candidate;\n }\n\n // Collision: same slug already used by different input → add suffix\n candidate = `${base}-${counter++}`;\n }\n}\n\n// // Optional: to reset between runs/tests\n// export function resetSlugCache() {\n// inputToSlug.clear();\n// slugToInput.clear();\n// }","import { hashString } from './hashString';\nimport { slugifyFsUnique } from './slugify';\n\nexport function makeUniqueJsonTargetPathFromString(str: string, namespace: string = ''): string {\n // const hash = hashString(str);\n let result = slugifyFsUnique(str); // + '-' + hash;\n if (namespace) {\n result = `${slugifyFsUnique(namespace)}$${result}`;\n }\n\n if (result.endsWith('/')) {\n result += 'index.json';\n } else if (!result.endsWith('.json')) {\n result += '.json';\n }\n return './' + result;\n // handle directory case\n}\n\nexport function makeTargetPathFromUrl(url: string, prefix: string = ''): string {\n // https://example.com/dir1/dir2/a.js => \"{prefix}{origin-slug}/dir1/dir2/a.js\";\n 
// https://example.com/dir1/dir2/a.js?params => \"{prefix}{origin-slug}/dir1/dir2/a{paramsmd5hash}.js\";\n // use slugify.ts\n\n try {\n new URL(url);\n } catch {\n debugger;\n }\n const urlObj = new URL(url);\n const origin = `${urlObj.protocol}//${urlObj.host}`;\n const originSlug = slugifyFsUnique(origin);\n\n let pathname = urlObj.pathname;\n let search = urlObj.search;\n\n // if path doesn't end with extension, throw\n if (!pathname.match(/\\.[^\\/]+$/)) {\n throw new Error(`Cannot make target path from URL without file extension: ${url}`);\n }\n\n const extension = pathname.substring(pathname.lastIndexOf('.'));\n let pathnameWithoutExtension = pathname.substring(0, pathname.lastIndexOf('.'));\n\n // check the pathname contains only valid fs characters\n const invalidPathnameChars = pathnameWithoutExtension.match(/[^a-zA-Z0-9\\-._\\/]/g);\n if (invalidPathnameChars) {\n const fixedPathnameWithoutExtension = slugifyFsUnique(pathnameWithoutExtension);\n console.warn(\n `Warning: pathname contains invalid filesystem characters (${[\n ...new Set(invalidPathnameChars),\n ].join(\n ', '\n )}), slugifying it: ${pathnameWithoutExtension}${extension} => ${fixedPathnameWithoutExtension}${extension}`\n );\n pathnameWithoutExtension = fixedPathnameWithoutExtension;\n }\n\n pathname = pathnameWithoutExtension + extension;\n\n if (pathname.length > 120) {\n console.warn(\n `Warning: pathname is too long (${pathname.length} characters), truncating to 150 characters: ${pathname}`\n );\n pathname = pathname.substring(0, 120 - extension.length) + extension;\n }\n\n if (search) {\n // create a hash from search params\n const hash = hashString(search);\n const dotIndex = pathname.lastIndexOf('.');\n if (dotIndex !== -1) {\n pathname = `${pathname.slice(0, dotIndex)}.${hash}${pathname.slice(dotIndex)}`;\n } else {\n pathname = `${pathname}${hash}`;\n }\n }\n\n // // handle directory case\n // if (pathname.endsWith('/')) {\n // pathname += '__index.json';\n // }\n\n return `./${prefix}${originSlug}${pathname}`;\n}\n","import {\n canResolve,\n parseRefValue,\n resolverResolve,\n type ResolveContextInternal,\n} from '../resolve';\nimport { resolvers } from '../resolvers';\nimport { createMergedObjectWithOverridenNonRefProps, deepClone, log } from '../shared';\nimport type { LocalFile, MakeOfflineResult, ManifestData, Resolver } from '../types';\nimport { makeTargetPathFromUrl, makeUniqueJsonTargetPathFromString } from './tools';\n\nexport async function makeOffline(manifest: any): Promise<MakeOfflineResult> {\n log('makeOffline', manifest);\n // if (typeof manifest !== 'object' || manifest === null) {\n // throw new Error('Manifest must be an object');\n // }\n\n const res = await makeOfflineInternal(manifest);\n let done = false;\n let resultManifest: any;\n async function* files() {\n resultManifest = yield* res;\n done = true;\n }\n\n return {\n get manifest(): ManifestData {\n if (!done) throw new Error('Iterate over files before getting manifest');\n return resultManifest;\n },\n files: files() as AsyncGenerator<LocalFile, void, void>,\n };\n}\n\nasync function* makeOfflineInternal(manifest: any): AsyncGenerator<LocalFile, ManifestData, void> {\n log('makeOfflineInternal', manifest);\n const resolveContext: ResolveContextInternal = {\n refCache: new Map<string, Promise<any>>(),\n forceFetch: true,\n signal: null,\n };\n const parent = { manifest };\n yield* walk(parent, 'manifest');\n return parent.manifest;\n\n async function* walk(node: any, key: string | number): AsyncGenerator<LocalFile> {\n // 
console.log('walk', node, key, node[key]);\n // let node[key] = node[key];\n if (typeof node[key] !== 'object' || node[key] === null) {\n return;\n }\n // let resolversToUse1 = resolvers.filter((r) => r.canResolve(node[key].$ref));\n // const usedResolvers = new Set<Resolver>();\n // do {\n if ('$ref' in node[key]) {\n const resolversToUse = resolvers.filter((r) => canResolve(r, node[key].$ref));\n if (resolversToUse.length === 0) {\n throw new Error(`No resolver found for ref: ${node[key].$ref}`);\n }\n if (resolversToUse.length > 1) {\n throw new Error(`Multiple resolvers can make offline ref: ${node[key].$ref}`);\n }\n\n const resolver = resolversToUse[0];\n\n let func: typeof offlineLocalizeRef;\n switch (resolver.offlineMethod) {\n case 'localizeRef':\n func = offlineLocalizeRef;\n break;\n case 'resolveRef':\n func = offlineResolveRef;\n break;\n default:\n throw new Error(`Unknown offlineMethod: ${resolver.offlineMethod}`);\n }\n\n const mergeRef = yield* func(resolver, node[key].$ref, resolveContext);\n\n if (Object.isFrozen(node[key])) throw new Error('Unexpected frozen node during makeOffline');\n\n node[key] = createMergedObjectWithOverridenNonRefProps({ $ref: mergeRef }, node[key]);\n }\n\n // recurse\n if (Array.isArray(node[key])) {\n for (const [index] of node[key].entries()) {\n yield* walk(node[key], index);\n }\n } else if (typeof node[key] === 'object' && node[key] !== null) {\n for (const key1 of Object.keys(node[key])) {\n // debugger;\n yield* walk(node[key], key1);\n }\n }\n }\n}\n\n// function canResolve(resolver: Resolver, ref: string): boolean {\n// if (resolver.canResolve) {\n// return resolver.canResolve(ref);\n// }\n// if (resolver.schema) {\n// return canResolveRefSchema(ref, resolver.schema);\n// }\n// throw new Error('Resolver is missing canResolve method and schema property');\n// }\n\nasync function* offlineLocalizeRef(\n resolver: Resolver,\n ref: string,\n context: ResolveContextInternal\n): AsyncGenerator<LocalFile, string, void> {\n const refData = deepClone(await resolverResolve(resolver, ref, context));\n // emit assets\n yield* makeOfflineInternal(refData);\n const url = parseRefValue(ref);\n const targetFilePath = makeTargetPathFromUrl(url);\n // TODO: handle CSS and other text files that may reference other assets\n yield { url, targetFilePath };\n const schema = ref.substring(0, ref.length - url.length - 1);\n return `${schema}:${targetFilePath}`;\n}\n\nasync function* offlineResolveRef(\n resolver: Resolver,\n ref: string,\n context: ResolveContextInternal\n): AsyncGenerator<LocalFile, string, void> {\n const refData = deepClone(await resolverResolve(resolver, ref, context));\n const targetFilePath = makeUniqueJsonTargetPathFromString(ref);\n const data = yield* makeOfflineInternal(refData);\n yield { data, targetFilePath };\n return targetFilePath;\n}\n\n// export function createMakeOfflineResult(\n// manifest: any,\n// walk: (node: any, key: string | number) => AsyncGenerator<LocalFile>\n// ): MakeOfflineResult {\n// log('createMakeOfflineResult', manifest);\n// if (typeof manifest !== 'object' || manifest === null) {\n// throw new Error('Manifest must be an object');\n// }\n// const parent = { manifest };\n// let done = false;\n// async function* files() {\n// yield* walk(parent, 'manifest');\n// done = true;\n// }\n\n// return {\n// get manifest() {\n// if (!done) {\n// throw new Error('Cannot access manifest before all files are generated');\n// }\n// return parent.manifest;\n// },\n// files: files(),\n// };\n// 
}\n"],"names":["murmur32WithSeed","str","seed","h","i","hashString","inputToSlug","slugToInput","WINDOWS_RESERVED","makeBaseSlug","input","slug","slugifyFsUnique","existing","base","candidate","counter","existingInput","makeUniqueJsonTargetPathFromString","namespace","result","makeTargetPathFromUrl","url","prefix","urlObj","origin","originSlug","pathname","search","extension","pathnameWithoutExtension","invalidPathnameChars","fixedPathnameWithoutExtension","hash","dotIndex","makeOffline","manifest","log","res","makeOfflineInternal","done","resultManifest","files","resolveContext","parent","walk","node","key","resolversToUse","resolvers","r","canResolve","resolver","func","offlineLocalizeRef","offlineResolveRef","mergeRef","createMergedObjectWithOverridenNonRefProps","index","key1","ref","context","refData","deepClone","resolverResolve","parseRefValue","targetFilePath"],"mappings":";AACA,SAASA,EAAiBC,GAAaC,GAAsB;AAC3D,MAAIC,IAAID,MAAS;AAEjB,WAASE,IAAI,GAAGA,IAAIH,EAAI,QAAQG;AAC9B,IAAAD,KAAKF,EAAI,WAAWG,CAAC,GACrBD,IAAI,KAAK,KAAKA,GAAG,QAAU;AAG7B,UAAQA,MAAM,GAAG,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC/C;AAGO,SAASE,EAAWJ,GAAqB;AAC9C,SACED,EAAiBC,GAAK,UAAU;AAAA,EAChCD,EAAiBC,GAAK,SAAU,IAChCD,EAAiBC,GAAK,SAAU,IAChCD,EAAiBC,GAAK,UAAU;AAEpC;ACnBA,MAAMK,wBAAkB,IAAA,GAClBC,wBAAkB,IAAA,GAElBC,wBAAuB,IAAI;AAAA,EAC/B;AAAA,EAAO;AAAA,EAAO;AAAA,EAAO;AAAA,EACrB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAChE;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAClE,CAAC;AAED,SAASC,EAAaC,GAAuB;AAC3C,MAAIC,IAAOD,EACR,UAAU,MAAM,EAEhB,QAAQ,8BAA8B,GAAG,EAEzC,QAAQ,OAAO,GAAG,EAElB,QAAQ,UAAU,EAAE,EACpB,YAAA;AAGH,SAAAC,IAAOA,EAAK,QAAQ,iBAAiB,EAAE,GAGvCA,IAAOA,EAAK,QAAQ,WAAW,EAAE,GAG5BA,MAAMA,IAAO,SAGdH,EAAiB,IAAIG,CAAI,MAC3BA,KAAQ,UAGHA;AACT;AAEO,SAASC,EAAgBF,GAAuB;AAErD,QAAMG,IAAWP,EAAY,IAAII,CAAK;AACtC,MAAIG,EAAU,QAAOA;AAErB,QAAMC,IAAOL,EAAaC,CAAK;AAC/B,MAAIK,IAAYD,GACZE,IAAU;AAEd,aAAa;AACX,UAAMC,IAAgBV,EAAY,IAAIQ,CAAS;AAE/C,QAAI,CAACE;AAEH,aAAAV,EAAY,IAAIQ,GAAWL,CAAK,GAChCJ,EAAY,IAAII,GAAOK,CAAS,GACzBA;AAGT,QAAIE,MAAkBP;AAEpB,aAAAJ,EAAY,IAAII,GAAOK,CAAS,GACzBA;AAIT,IAAAA,IAAY,GAAGD,CAAI,IAAIE,GAAS;AAAA,EAClC;AACF;AC/DO,SAASE,EAAmCjB,GAAakB,IAAoB,IAAY;AAE9F,MAAIC,IAASR,EAAgBX,CAAG;AAChC,SAAIkB,MACFC,IAAS,GAAGR,EAAgBO,CAAS,CAAC,IAAIC,CAAM,KAG9CA,EAAO,SAAS,GAAG,IACrBA,KAAU,eACAA,EAAO,SAAS,OAAO,MACjCA,KAAU,UAEL,OAAOA;AAEhB;AAEO,SAASC,EAAsBC,GAAaC,IAAiB,IAAY;AAK9E,MAAI;AACF,QAAI,IAAID,CAAG;AAAA,EACb,QAAQ;AACN;AAAA,EACF;AACA,QAAME,IAAS,IAAI,IAAIF,CAAG,GACpBG,IAAS,GAAGD,EAAO,QAAQ,KAAKA,EAAO,IAAI,IAC3CE,IAAad,EAAgBa,CAAM;AAEzC,MAAIE,IAAWH,EAAO,UAClBI,IAASJ,EAAO;AAGpB,MAAI,CAACG,EAAS,MAAM,WAAW;AAC7B,UAAM,IAAI,MAAM,4DAA4DL,CAAG,EAAE;AAGnF,QAAMO,IAAYF,EAAS,UAAUA,EAAS,YAAY,GAAG,CAAC;AAC9D,MAAIG,IAA2BH,EAAS,UAAU,GAAGA,EAAS,YAAY,GAAG,CAAC;AAG9E,QAAMI,IAAuBD,EAAyB,MAAM,qBAAqB;AACjF,MAAIC,GAAsB;AACxB,UAAMC,IAAgCpB,EAAgBkB,CAAwB;AAC9E,YAAQ;AAAA,MACN,6DAA6D;AAAA,QAC3D,GAAG,IAAI,IAAIC,CAAoB;AAAA,MAAA,EAC/B;AAAA,QACA;AAAA,MAAA,CACD,qBAAqBD,CAAwB,GAAGD,CAAS,OAAOG,CAA6B,GAAGH,CAAS;AAAA,IAAA,GAE5GC,IAA2BE;AAAA,EAC7B;AAWA,MATAL,IAAWG,IAA2BD,GAElCF,EAAS,SAAS,QACpB,QAAQ;AAAA,IACN,kCAAkCA,EAAS,MAAM,+CAA+CA,CAAQ;AAAA,EAAA,GAE1GA,IAAWA,EAAS,UAAU,GAAG,MAAME,EAAU,MAAM,IAAIA,IAGzDD,GAAQ;AAEV,UAAMK,IAAO5B,EAAWuB,CAAM,GACxBM,IAAWP,EAAS,YAAY,GAAG;AACzC,IAAIO,MAAa,KACfP,IAAW,GAAGA,EAAS,MAAM,GAAGO,CAAQ,CAAC,IAAID,CAAI,GAAGN,EAAS,MAAMO,CAAQ,CAAC,KAE5EP,IAAW,GAAGA,CAAQ,GAAGM,CAAI;AAAA,EAEjC;AAOA,SAAO,KAAKV,CAAM,GAAGG,CAAU,GAAGC,CAAQ;AAC5C;ACzEA,eAAsBQ,EAAYC,GAA2C;AAC3E,EAAAC,EAAI,eAAeD,
CAAQ;AAK3B,QAAME,IAAM,MAAMC,EAAoBH,CAAQ;AAC9C,MAAII,IAAO,IACPC;AACJ,kBAAgBC,IAAQ;AACtB,IAAAD,IAAiB,OAAOH,GACxBE,IAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,IAAI,WAAyB;AAC3B,UAAI,CAACA,EAAM,OAAM,IAAI,MAAM,4CAA4C;AACvE,aAAOC;AAAA,IACT;AAAA,IACA,OAAOC,EAAA;AAAA,EAAM;AAEjB;AAEA,gBAAgBH,EAAoBH,GAA8D;AAChG,EAAAC,EAAI,uBAAuBD,CAAQ;AACnC,QAAMO,IAAyC;AAAA,IAC7C,8BAAc,IAAA;AAAA,IACd,YAAY;AAAA,IACZ,QAAQ;AAAA,EAAA,GAEJC,IAAS,EAAE,UAAAR,EAAA;AACjB,gBAAOS,EAAKD,GAAQ,UAAU,GACvBA,EAAO;AAEd,kBAAgBC,EAAKC,GAAWC,GAAiD;AAG/E,QAAI,SAAOD,EAAKC,CAAG,KAAM,YAAYD,EAAKC,CAAG,MAAM,OAMnD;AAAA,UAAI,UAAUD,EAAKC,CAAG,GAAG;AACvB,cAAMC,IAAiBC,EAAU,OAAO,CAACC,MAAMC,EAAWD,GAAGJ,EAAKC,CAAG,EAAE,IAAI,CAAC;AAC5E,YAAIC,EAAe,WAAW;AAC5B,gBAAM,IAAI,MAAM,8BAA8BF,EAAKC,CAAG,EAAE,IAAI,EAAE;AAEhE,YAAIC,EAAe,SAAS;AAC1B,gBAAM,IAAI,MAAM,4CAA4CF,EAAKC,CAAG,EAAE,IAAI,EAAE;AAG9E,cAAMK,IAAWJ,EAAe,CAAC;AAEjC,YAAIK;AACJ,gBAAQD,EAAS,eAAA;AAAA,UACf,KAAK;AACH,YAAAC,IAAOC;AACP;AAAA,UACF,KAAK;AACH,YAAAD,IAAOE;AACP;AAAA,UACF;AACE,kBAAM,IAAI,MAAM,0BAA0BH,EAAS,aAAa,EAAE;AAAA,QAAA;AAGtE,cAAMI,IAAW,OAAOH,EAAKD,GAAUN,EAAKC,CAAG,EAAE,MAAMJ,CAAc;AAErE,YAAI,OAAO,SAASG,EAAKC,CAAG,CAAC,EAAG,OAAM,IAAI,MAAM,2CAA2C;AAE3F,QAAAD,EAAKC,CAAG,IAAIU,EAA2C,EAAE,MAAMD,EAAA,GAAYV,EAAKC,CAAG,CAAC;AAAA,MACtF;AAGA,UAAI,MAAM,QAAQD,EAAKC,CAAG,CAAC;AACzB,mBAAW,CAACW,CAAK,KAAKZ,EAAKC,CAAG,EAAE;AAC9B,iBAAOF,EAAKC,EAAKC,CAAG,GAAGW,CAAK;AAAA,eAErB,OAAOZ,EAAKC,CAAG,KAAM,YAAYD,EAAKC,CAAG,MAAM;AACxD,mBAAWY,KAAQ,OAAO,KAAKb,EAAKC,CAAG,CAAC;AAEtC,iBAAOF,EAAKC,EAAKC,CAAG,GAAGY,CAAI;AAAA;AAAA,EAGjC;AACF;AAYA,gBAAgBL,EACdF,GACAQ,GACAC,GACyC;AACzC,QAAMC,IAAUC,EAAU,MAAMC,EAAgBZ,GAAUQ,GAAKC,CAAO,CAAC;AAEvE,SAAOtB,EAAoBuB,CAAO;AAClC,QAAMxC,IAAM2C,EAAcL,CAAG,GACvBM,IAAiB7C,EAAsBC,CAAG;AAEhD,eAAM,EAAE,KAAAA,GAAK,gBAAA4C,EAAA,GAEN,GADQN,EAAI,UAAU,GAAGA,EAAI,SAAStC,EAAI,SAAS,CAAC,CAC3C,IAAI4C,CAAc;AACpC;AAEA,gBAAgBX,EACdH,GACAQ,GACAC,GACyC;AACzC,QAAMC,IAAUC,EAAU,MAAMC,EAAgBZ,GAAUQ,GAAKC,CAAO,CAAC,GACjEK,IAAiBhD,EAAmC0C,CAAG;AAE7D,eAAM,EAAE,MADK,OAAOrB,EAAoBuB,CAAO,GACjC,gBAAAI,EAAA,GACPA;AACT;"}

package/dist/bundle/makeOfflineBundle-BpFIeDmK.js
@@ -1,5 +1,5 @@
-import { l as r, resolve as l } from "./
-import { makeOffline as f } from "./makeOffline-
+import { l as r, resolve as l } from "./bundle.js";
+import { makeOffline as f } from "./makeOffline-CBs7cwrM.js";
 async function* u(t) {
   r("makeOfflineBundle", t);
   const e = await f(t);
@@ -55,4 +55,4 @@ async function d(t) {
 export {
   u as makeOfflineBundle
 };
-//# sourceMappingURL=makeOfflineBundle-
+//# sourceMappingURL=makeOfflineBundle-BpFIeDmK.js.map

package/dist/bundle/makeOfflineBundle-BpFIeDmK.js.map
@@ -0,0 +1 @@
{"version":3,"file":"makeOfflineBundle-BpFIeDmK.js","sources":["../../src/offline/makeOfflineBundle.ts"],"sourcesContent":["import { log } from '../shared';\nimport { makeOffline } from './makeOffline';\nimport { resolve } from '..';\n\nexport async function* makeOfflineBundle(\n manifest: unknown\n): AsyncGenerator<{ path: string; data: ArrayBuffer }> {\n log('makeOfflineBundle', manifest);\n const offlineData = await makeOffline(manifest);\n for await (const file of offlineData.files) {\n if ('data' in file) {\n const jsonString = JSON.stringify(file.data, null, 2);\n yield {\n path: file.targetFilePath,\n data: new TextEncoder().encode(jsonString).buffer,\n };\n } else if ('url' in file) {\n const blob = await downloadFile(file.url);\n yield {\n path: file.targetFilePath,\n data: await blob.arrayBuffer(),\n };\n }\n }\n // yield* generateJsLoaderFiles();\n const html = await getIndexHtml(offlineData.manifest);\n yield {\n path: 'index.html',\n data: new TextEncoder().encode(html).buffer,\n };\n}\n\nconst MAX_CONCURRENT_DOWNLOADS = 10;\nconst queue = new Set<Promise<Blob>>();\n// have a queue to limit concurrent downloads\nasync function downloadFile(url: string): Promise<Blob> {\n while (queue.size >= MAX_CONCURRENT_DOWNLOADS) {\n await Promise.race(queue);\n }\n const downloadPromise = (async () => {\n log('Fetching file for zip:', url);\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);\n }\n return await response.blob();\n })();\n queue.add(downloadPromise);\n try {\n return await downloadPromise;\n } finally {\n queue.delete(downloadPromise);\n }\n}\n\nasync function getIndexHtml(manifest: unknown) {\n const entryPoint = await resolve(manifest, '/runtime/entry');\n const html = `\n<!DOCTYPE html>\n<script type=\"module\">\n import { load } from ${JSON.stringify(entryPoint)};\n await load(${JSON.stringify(manifest)});\n console.info('🚀 loaded');\n</script> \n`;\n return html;\n}\n"],"names":["makeOfflineBundle","manifest","log","offlineData","makeOffline","file","jsonString","blob","downloadFile","html","getIndexHtml","MAX_CONCURRENT_DOWNLOADS","queue","url","downloadPromise","response","entryPoint","resolve"],"mappings":";;AAIA,gBAAuBA,EACrBC,GACqD;AACrD,EAAAC,EAAI,qBAAqBD,CAAQ;AACjC,QAAME,IAAc,MAAMC,EAAYH,CAAQ;AAC9C,mBAAiBI,KAAQF,EAAY;AACnC,QAAI,UAAUE,GAAM;AAClB,YAAMC,IAAa,KAAK,UAAUD,EAAK,MAAM,MAAM,CAAC;AACpD,YAAM;AAAA,QACJ,MAAMA,EAAK;AAAA,QACX,MAAM,IAAI,YAAA,EAAc,OAAOC,CAAU,EAAE;AAAA,MAAA;AAAA,IAE/C,WAAW,SAASD,GAAM;AACxB,YAAME,IAAO,MAAMC,EAAaH,EAAK,GAAG;AACxC,YAAM;AAAA,QACJ,MAAMA,EAAK;AAAA,QACX,MAAM,MAAME,EAAK,YAAA;AAAA,MAAY;AAAA,IAEjC;AAGF,QAAME,IAAO,MAAMC,EAAaP,EAAY,QAAQ;AACpD,QAAM;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,IAAI,YAAA,EAAc,OAAOM,CAAI,EAAE;AAAA,EAAA;AAEzC;AAEA,MAAME,IAA2B,IAC3BC,wBAAY,IAAA;AAElB,eAAeJ,EAAaK,GAA4B;AACtD,SAAOD,EAAM,QAAQD;AACnB,UAAM,QAAQ,KAAKC,CAAK;AAE1B,QAAME,KAAmB,YAAY;AACnC,IAAAZ,EAAI,0BAA0BW,CAAG;AACjC,UAAME,IAAW,MAAM,MAAMF,CAAG;AAChC,QAAI,CAACE,EAAS;AACZ,YAAM,IAAI,MAAM,mBAAmBF,CAAG,KAAKE,EAAS,MAAM,IAAIA,EAAS,UAAU,EAAE;AAErF,WAAO,MAAMA,EAAS,KAAA;AAAA,EACxB,GAAA;AACA,EAAAH,EAAM,IAAIE,CAAe;AACzB,MAAI;AACF,WAAO,MAAMA;AAAA,EACf,UAAA;AACE,IAAAF,EAAM,OAAOE,CAAe;AAAA,EAC9B;AACF;AAEA,eAAeJ,EAAaT,GAAmB;AAC7C,QAAMe,IAAa,MAAMC,EAAQhB,GAAU,gBAAgB;AAS3D,SARa;AAAA;AAAA;AAAA,yBAGU,KAAK,UAAUe,CAAU,CAAC;AAAA,eACpC,KAAK,UAAUf,CAAQ,CAAC;AAAA;AAAA;AAAA;AAKvC;"}

package/dist/esm/importJson.d.ts
@@ -1,2 +1,2 @@
-import { ResolveContextInternal } from './resolve';
+import type { ResolveContextInternal } from './resolve';
 export declare function importJson<T = any>(url: string, resolveContext: ResolveContextInternal): Promise<T>;

package/dist/esm/importJson.js
@@ -0,0 +1,52 @@
+import { deepFreeze, log } from './shared';
+let importJsonNotAvailable;
+const jsonFrozen = new WeakSet();
+// ET: this is a workaround for Vite that analyzes dynamic imports and removes 'with' option
+// to be removed when Vite supports it properly
+// Cons: can have CSP issues in some environments
+const importJsonNative = new Function('url', 'return import(url, { with: { type: "json" } });');
+export async function importJson(url, resolveContext) {
+    // console.warn('importJson:', resolveContext.forceFetch);
+    if (importJsonNotAvailable === undefined && !resolveContext.forceFetch) {
+        try {
+            await importJsonNative(url);
+            importJsonNotAvailable = false;
+        }
+        catch {
+            log('importJson: dynamic import not available, falling back to fetch');
+            importJsonNotAvailable = true;
+        }
+    }
+    let result = undefined;
+    if (!importJsonNotAvailable && !resolveContext.forceFetch) {
+        const module = await importJsonNative(url);
+        result = module.default;
+        resolveContext.importCallback?.(url, 'json');
+    }
+    else {
+        log('importJson: fetch', url);
+        result = await loadJson(url, resolveContext.refCache, resolveContext.signal || null);
+        resolveContext.importCallback?.(url, 'json');
+    }
+    if (!jsonFrozen.has(result)) {
+        deepFreeze(result);
+        // jsonFrozen.add(result);
+    }
+    return result;
+}
+// const fetchCache = new Map<string, Promise<any>>();
+async function loadJson(url, fetchCache, signal) {
+    const key = '__loadJson__' + url;
+    if (fetchCache.has(key)) {
+        return fetchCache.get(key);
+    }
+    const dataPromise = (async function loadJsonInner() {
+        const response = await fetch(url, { signal });
+        if (!response.ok) {
+            throw new Error(`Failed to fetch JSON from ${url}`);
+        }
+        return await response.json();
+    })();
+    fetchCache.set(key, dataPromise);
+    return dataPromise;
+}

package/dist/esm/index.js
@@ -0,0 +1,16 @@
+import { createFunction } from './shared';
+export const mount = createFunction('mount');
+export const load = createFunction('load');
+export const initialize = createFunction('initialize');
+// extra
+// export const createFloorPlan = createFunction('createFloorPlan');
+// export const rewriteManifestToLocalFiles = createFunction('rewriteManifestToLocalFiles');
+export async function callFunction_Experimental(name, manifest, ...args) {
+    const fn = createFunction(name);
+    return await fn(manifest, ...args);
+}
+export { mutateManifest } from './mutateManifest';
+export { downloadZip, makeOfflineBundle } from './offline';
+export { resolve } from './resolve';
+// preloadJson('https://efp-runtime.expofp.com/branches/main.json');
+// preconnectUrl('https://efp-runtime.expofp.com/');
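
As a point of reference, a minimal consumption sketch for the new esm entry shown above; the bare '@expofp/loader' specifier assumes package.json maps its module entry to dist/esm/index.js, and the manifest value is a placeholder:

import { load, resolve } from '@expofp/loader';

const manifest = { /* placeholder manifest object */ };
// resolve() and load() are re-exported by dist/esm/index.js above;
// '/runtime/entry' is the path used by getIndexHtml elsewhere in this diff.
const entryPoint = await resolve(manifest, '/runtime/entry');
await load(manifest);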

package/dist/esm/loadAndWaitGlobal.js
@@ -0,0 +1,157 @@
+//import { log } from './shared';
+/**
+ * Load a script from `scriptUrl` and optionally wait until `globalVar` appears.
+ * Works in both browser (via <script>) and Node 18+ (via fetch + eval).
+ */
+export async function loadAndWaitGlobal(scriptUrl, options) {
+    const { globalVar, timeoutMs = 10_000, pollIntervalMs = 10 } = options;
+    if (isBrowser()) {
+        if (globalVar && window[globalVar] !== undefined) {
+            delete window[globalVar];
+        }
+        await loadInBrowser(scriptUrl, options.signal);
+        if (globalVar) {
+            await waitForGlobal(() => window[globalVar], globalVar, timeoutMs, pollIntervalMs, options.signal);
+        }
+    }
+    else {
+        if (globalVar && globalThis[globalVar] !== undefined) {
+            delete globalThis[globalVar];
+        }
+        await loadInNode(scriptUrl, options.signal);
+        if (globalVar) {
+            await waitForGlobal(() => globalThis[globalVar], globalVar, timeoutMs, pollIntervalMs, options.signal);
+        }
+    }
+}
+function isBrowser() {
+    return typeof window !== 'undefined' && typeof document !== 'undefined';
+}
+/* -------------------- Browser implementation -------------------- */
+export function loadInBrowser(scriptUrl, signal) {
+    return new Promise((resolve, reject) => {
+        const script = document.createElement('script');
+        script.src = scriptUrl;
+        script.async = true;
+        const cleanup = () => {
+            script.onload = null;
+            script.onerror = null;
+            script.remove();
+        };
+        const onAbort = () => {
+            cleanup();
+            reject(new Error('Script load aborted'));
+        };
+        if (signal?.aborted) {
+            cleanup();
+            reject(new Error('Script load aborted'));
+            return;
+        }
+        signal?.addEventListener('abort', onAbort, { once: true });
+        script.onload = () => {
+            signal?.removeEventListener('abort', onAbort);
+            cleanup();
+            resolve();
+        };
+        script.onerror = () => {
+            signal?.removeEventListener('abort', onAbort);
+            cleanup();
+            reject(new Error(`Failed to load script: ${scriptUrl}`));
+        };
+        document.head.appendChild(script);
+    });
+}
+/* -------------------- Node implementation -------------------- */
+async function loadInNode(scriptUrl, signal) {
+    if (typeof fetch !== 'function') {
+        throw new Error('loadInNode: fetch is not available. Use Node 18+ or provide a global fetch.');
+    }
+    if (signal?.aborted) {
+        throw new Error('Script load aborted');
+    }
+    let res;
+    try {
+        res = await fetch(scriptUrl, { signal });
+    }
+    catch (err) {
+        // Node / WHATWG fetch uses AbortError on abort
+        if (err && err.name === 'AbortError') {
+            throw new Error('Script load aborted');
+        }
+        throw err;
+    }
+    if (!res.ok) {
+        throw new Error(`Failed to load script in Node (HTTP ${res.status}): ${scriptUrl}`);
+    }
+    // If it was aborted between headers and body:
+    if (signal?.aborted) {
+        throw new Error('Script load aborted');
+    }
+    let code;
+    try {
+        code = await res.text();
+    }
+    catch (err) {
+        if (err && err.name === 'AbortError') {
+            throw new Error('Script load aborted');
+        }
+        throw err;
+    }
+    if (signal?.aborted) {
+        throw new Error('Script load aborted');
+    }
+    // TEMPORARILY expose window for browser-style UMDs that expect it
+    const g = globalThis;
+    const prevWindow = g.window;
+    g.window = g;
+    try {
+        // Indirect eval -> global scope in Node
+        // Assumes the loaded script attaches itself to globalThis/window/global.
+        (0, eval)(code);
+    }
+    finally {
+        // Restore previous window (if any)
+        if (prevWindow === undefined) {
+            delete g.window;
+        }
+        else {
+            g.window = prevWindow;
+        }
+    }
+}
+/* -------------------- Shared helper -------------------- */
+function waitForGlobal(getter, name, timeoutMs, pollIntervalMs, abortSignal) {
+    return new Promise((resolve, reject) => {
+        const start = Date.now();
+        const onAbort = () => {
+            reject(new Error('Wait for global aborted'));
+        };
+        if (abortSignal?.aborted) {
+            reject(new Error('Wait for global aborted'));
+            return;
+        }
+        abortSignal?.addEventListener('abort', onAbort, { once: true });
+        const check = () => {
+            if (abortSignal?.aborted) {
+                return reject(new Error('Wait for global aborted'));
+            }
+            try {
+                if (getter() !== undefined) {
+                    abortSignal?.removeEventListener('abort', onAbort);
+                    // log(`loadAndWaitGlobal: global "${name}" is now available`);
+                    return resolve();
+                }
+            }
+            catch {
+                // ignore access errors and keep polling
+            }
+            if (Date.now() - start >= timeoutMs) {
+                abortSignal?.removeEventListener('abort', onAbort);
+                // console.error(`loadAndWaitGlobal: timed out waiting for global "${name}"`);
+                return reject(new Error(`Timed out after ${timeoutMs} ms waiting for global "${name}"`));
+            }
+            setTimeout(check, pollIntervalMs);
+        };
+        check();
+    });
+}
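
A sketch of how loadAndWaitGlobal might be called; the option names come from the hunk above, while the script URL, global name, and deep import path into dist/esm are assumptions:

import { loadAndWaitGlobal } from '@expofp/loader/dist/esm/loadAndWaitGlobal.js';

const controller = new AbortController();
// Load a classic <script>/UMD bundle and poll until the global appears.
await loadAndWaitGlobal('https://example.com/expofp-bundle.js', {
  globalVar: 'ExpoFP',        // hypothetical global name
  timeoutMs: 10_000,          // defaults shown in the source above
  pollIntervalMs: 10,
  signal: controller.signal,  // optional cancellation
});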

package/dist/esm/mutateManifest.d.ts
@@ -1,2 +1,2 @@
-import { ResolveOptions } from './resolve';
+import type { ResolveOptions } from './resolve';
 export declare function mutateManifest(manifest: any, paths: string[], resolveContext?: Omit<ResolveOptions, 'mutate'>): Promise<void>;

package/dist/esm/mutateManifest.js
@@ -0,0 +1,10 @@
+import { resolve } from './resolve';
+export async function mutateManifest(manifest, paths, resolveContext) {
+    resolveContext = resolveContext || {};
+    for (const path of paths) {
+        await resolve(manifest, path, { ...resolveContext, mutate: true });
+    }
+}
+if (typeof window !== 'undefined') {
+    window['__debugMutateManifest'] = mutateManifest;
+}
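
For reference, a sketch of the call shape implied by mutateManifest above; '/runtime/entry' appears elsewhere in this diff, the second path is a placeholder:

import { mutateManifest } from '@expofp/loader';

const manifest = { /* placeholder manifest object */ };
// Each listed path is resolved in place via resolve(manifest, path, { mutate: true }).
await mutateManifest(manifest, ['/runtime/entry', '/data/event']);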

package/dist/esm/offline/downloadZip copy.js
@@ -0,0 +1,89 @@
+// import { gunzipSync } from 'fflate';
+// import untar from 'js-untar';
+// import JSZip from 'jszip';
+// import { makeOffline } from './makeOffline';
+// import { log } from '../shared';
+export {};
+// export async function downloadZip(manifest: unknown) {
+// const zip = new JSZip();
+// const offlineData = await makeOffline(manifest);
+// for await (const file of offlineData.files) {
+// if ('data' in file) {
+// zip.file(file.targetFilePath, JSON.stringify(file.data, null, 2));
+// } else if ('url' in file) {
+// await scheduleFileForZip(zip, file.targetFilePath, file.url);
+// }
+// }
+// await addJsLoader(zip);
+// addIndexHtml(zip, offlineData.manifest);
+// // Generate archive
+// const blob = await zip.generateAsync({ type: 'blob' });
+// // console.info('Manifest for HTML:', offlineData.manifest);
+// // Create link & trigger download
+// const a = document.createElement('a');
+// a.href = URL.createObjectURL(blob);
+// a.download = 'offline.zip';
+// a.click();
+// URL.revokeObjectURL(a.href);
+// }
+// const MAX_CONCURRENT_DOWNLOADS = 10;
+// const queue = new Set<Promise<void>>();
+// // have a queue to limit concurrent downloads
+// async function scheduleFileForZip(zip: JSZip, targetFilePath: string, url: string) {
+// while (queue.size >= MAX_CONCURRENT_DOWNLOADS) {
+// await Promise.race(queue);
+// }
+// const downloadPromise = (async () => {
+// log('Fetching file for zip:', url);
+// const response = await fetch(url);
+// if (!response.ok) {
+// throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
+// }
+// const blob = await response.blob();
+// zip.file(targetFilePath, blob);
+// })();
+// queue.add(downloadPromise);
+// try {
+// await downloadPromise;
+// } finally {
+// queue.delete(downloadPromise);
+// }
+// }
+// async function addJsLoader(zip: JSZip) {
+// const packageInfo = 'https://registry.npmjs.org/@expofp/loader';
+// const response = await fetch(packageInfo);
+// if (!response.ok) {
+// throw new Error(`Failed to fetch ${packageInfo}: ${response.status} ${response.statusText}`);
+// }
+// const data = await response.json();
+// const latestVersion = data['dist-tags'].latest;
+// const tarballUrl = data.versions[latestVersion].dist.tarball;
+// const tgzResponse = await fetch(tarballUrl);
+// if (!tgzResponse.ok) {
+// throw new Error(
+// `Failed to fetch ${tarballUrl}: ${tgzResponse.status} ${tgzResponse.statusText}`
+// );
+// }
+// const tgzArrayBuffer = await tgzResponse.arrayBuffer();
+// // use fflate to convert tgz to tar
+// const tarFile = gunzipSync(new Uint8Array(tgzArrayBuffer));
+// const files = await untar(tarFile.buffer as ArrayBuffer);
+// console.info('Extracted tar file from tgz, size:', files);
+// // const arrayBuffer = await tgzResponse.arrayBuffer();
+// // const files = await untar(arrayBuffer);
+// for (const file of files) {
+// console.info('Adding loader file to zip:', file.name);
+// zip.file('efp-loader/' + file.name, file.buffer);
+// }
+// }
+// function addIndexHtml(zip: JSZip, manifest: unknown) {
+// const html = `
+// <!DOCTYPE html>
+// <script type="module">
+// import { load } from './efp-loader/dist/loader.js';
+// await load(${JSON.stringify(manifest)});
+// console.info('🚀 FloorPlan loaded', floorplan);
+// </script>
+// `;
+// zip.file('index.html', html);
+// }

package/dist/esm/offline/downloadZip.js
@@ -0,0 +1,18 @@
+import JSZip from 'jszip';
+import { makeOfflineBundle } from './makeOfflineBundle';
+export async function downloadZip(manifest) {
+    const zip = new JSZip();
+    const bundle = await makeOfflineBundle(manifest);
+    for await (const file of bundle) {
+        zip.file(file.path, file.data);
+    }
+    // Generate archive
+    const blob = await zip.generateAsync({ type: 'blob' });
+    // console.info('Manifest for HTML:', offlineData.manifest);
+    // Create link & trigger download
+    const a = document.createElement('a');
+    a.href = URL.createObjectURL(blob);
+    a.download = 'offline.zip';
+    a.click();
+    URL.revokeObjectURL(a.href);
+}
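
downloadZip above is browser-only (it creates an <a> element and triggers a download); a sketch of consuming the same makeOfflineBundle generator in Node and writing the yielded entries to disk instead, with the output directory as an assumption:

import { mkdir, writeFile } from 'node:fs/promises';
import { dirname, join } from 'node:path';
import { makeOfflineBundle } from '@expofp/loader';

const manifest = { /* placeholder manifest object */ };
const bundle = await makeOfflineBundle(manifest);
for await (const file of bundle) {
  // Each entry is { path, data: ArrayBuffer }, as yielded by makeOfflineBundle.
  const target = join('offline-bundle', file.path);
  await mkdir(dirname(target), { recursive: true });
  await writeFile(target, Buffer.from(file.data));
}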

package/dist/esm/offline/hashString.js
@@ -0,0 +1,16 @@
+// 32-bit murmur-ish hash with configurable seed
+function murmur32WithSeed(str, seed) {
+    let h = seed >>> 0;
+    for (let i = 0; i < str.length; i++) {
+        h ^= str.charCodeAt(i);
+        h = Math.imul(h, 0x01000193); // FNV-ish prime, good mixer
+    }
+    return (h >>> 0).toString(16).padStart(8, '0'); // 8 hex chars
+}
+// 128-bit (MD5-width) hash: 4 × 32-bit parts concatenated
+export function hashString(str) {
+    return (murmur32WithSeed(str, 0x811c9dc5) + // your original seed
+        murmur32WithSeed(str, 0x21f0aaad) +
+        murmur32WithSeed(str, 0x1b873593) +
+        murmur32WithSeed(str, 0x85ebca6b));
+}

package/dist/esm/offline/index.js
@@ -0,0 +1,29 @@
+export async function makeOfflineBundle(manifest) {
+    const { makeOfflineBundle } = await import('./makeOfflineBundle');
+    return await makeOfflineBundle(manifest);
+}
+export async function downloadZip(manifest) {
+    const { downloadZip } = await import('./downloadZip');
+    return await downloadZip(manifest);
+}
+async function debugLogOfflineManifestFiles(manifest) {
+    // console.log('Offline files:');
+    const { makeOffline } = await import('./makeOffline');
+    const result = await makeOffline(manifest);
+    // const allFiles = [];
+    for await (const file of (await result).files) {
+        if ('data' in file) {
+            console.info(`🗳️ Put ${file.targetFilePath} <=`, file.data);
+        }
+        else if ('url' in file) {
+            console.info(`🗳️ Download ${file.targetFilePath} <= ${file.url}`);
+        }
+        // allFiles.push(file);
+    }
+    console.info('Offline data: ', result.manifest);
+}
+if (typeof window !== 'undefined') {
+    window['__debugDownloadZip'] = downloadZip;
+    window['__debugMakeOfflineBundle'] = makeOfflineBundle;
+    window['__debugLogOfflineManifestFiles'] = debugLogOfflineManifestFiles;
+}

package/dist/esm/offline/makeOffline.d.ts
@@ -1,2 +1,2 @@
-import { MakeOfflineResult } from '../types';
+import type { MakeOfflineResult } from '../types';
 export declare function makeOffline(manifest: any): Promise<MakeOfflineResult>;

package/dist/esm/offline/makeOffline.js
@@ -0,0 +1,134 @@
+import { canResolve, parseRefValue, resolverResolve, } from '../resolve';
+import { resolvers } from '../resolvers';
+import { createMergedObjectWithOverridenNonRefProps, deepClone, log } from '../shared';
+import { makeTargetPathFromUrl, makeUniqueJsonTargetPathFromString } from './tools';
+export async function makeOffline(manifest) {
+    log('makeOffline', manifest);
+    // if (typeof manifest !== 'object' || manifest === null) {
+    // throw new Error('Manifest must be an object');
+    // }
+    const res = await makeOfflineInternal(manifest);
+    let done = false;
+    let resultManifest;
+    async function* files() {
+        resultManifest = yield* res;
+        done = true;
+    }
+    return {
+        get manifest() {
+            if (!done)
+                throw new Error('Iterate over files before getting manifest');
+            return resultManifest;
+        },
+        files: files(),
+    };
+}
+async function* makeOfflineInternal(manifest) {
+    log('makeOfflineInternal', manifest);
+    const resolveContext = {
+        refCache: new Map(),
+        forceFetch: true,
+        signal: null,
+    };
+    const parent = { manifest };
+    yield* walk(parent, 'manifest');
+    return parent.manifest;
+    async function* walk(node, key) {
+        // console.log('walk', node, key, node[key]);
+        // let node[key] = node[key];
+        if (typeof node[key] !== 'object' || node[key] === null) {
+            return;
+        }
+        // let resolversToUse1 = resolvers.filter((r) => r.canResolve(node[key].$ref));
+        // const usedResolvers = new Set<Resolver>();
+        // do {
+        if ('$ref' in node[key]) {
+            const resolversToUse = resolvers.filter((r) => canResolve(r, node[key].$ref));
+            if (resolversToUse.length === 0) {
+                throw new Error(`No resolver found for ref: ${node[key].$ref}`);
+            }
+            if (resolversToUse.length > 1) {
+                throw new Error(`Multiple resolvers can make offline ref: ${node[key].$ref}`);
+            }
+            const resolver = resolversToUse[0];
+            let func;
+            switch (resolver.offlineMethod) {
+                case 'localizeRef':
+                    func = offlineLocalizeRef;
+                    break;
+                case 'resolveRef':
+                    func = offlineResolveRef;
+                    break;
+                default:
+                    throw new Error(`Unknown offlineMethod: ${resolver.offlineMethod}`);
+            }
+            const mergeRef = yield* func(resolver, node[key].$ref, resolveContext);
+            if (Object.isFrozen(node[key]))
+                throw new Error('Unexpected frozen node during makeOffline');
+            node[key] = createMergedObjectWithOverridenNonRefProps({ $ref: mergeRef }, node[key]);
+        }
+        // recurse
+        if (Array.isArray(node[key])) {
+            for (const [index] of node[key].entries()) {
+                yield* walk(node[key], index);
+            }
+        }
+        else if (typeof node[key] === 'object' && node[key] !== null) {
+            for (const key1 of Object.keys(node[key])) {
+                // debugger;
+                yield* walk(node[key], key1);
+            }
+        }
+    }
+}
+// function canResolve(resolver: Resolver, ref: string): boolean {
+// if (resolver.canResolve) {
+// return resolver.canResolve(ref);
+// }
+// if (resolver.schema) {
+// return canResolveRefSchema(ref, resolver.schema);
+// }
+// throw new Error('Resolver is missing canResolve method and schema property');
+// }
+async function* offlineLocalizeRef(resolver, ref, context) {
+    const refData = deepClone(await resolverResolve(resolver, ref, context));
+    // emit assets
+    yield* makeOfflineInternal(refData);
+    const url = parseRefValue(ref);
+    const targetFilePath = makeTargetPathFromUrl(url);
+    // TODO: handle CSS and other text files that may reference other assets
+    yield { url, targetFilePath };
+    const schema = ref.substring(0, ref.length - url.length - 1);
+    return `${schema}:${targetFilePath}`;
+}
+async function* offlineResolveRef(resolver, ref, context) {
+    const refData = deepClone(await resolverResolve(resolver, ref, context));
+    const targetFilePath = makeUniqueJsonTargetPathFromString(ref);
+    const data = yield* makeOfflineInternal(refData);
+    yield { data, targetFilePath };
+    return targetFilePath;
+}
+// export function createMakeOfflineResult(
+// manifest: any,
+// walk: (node: any, key: string | number) => AsyncGenerator<LocalFile>
+// ): MakeOfflineResult {
+// log('createMakeOfflineResult', manifest);
+// if (typeof manifest !== 'object' || manifest === null) {
+// throw new Error('Manifest must be an object');
+// }
+// const parent = { manifest };
+// let done = false;
+// async function* files() {
+// yield* walk(parent, 'manifest');
+// done = true;
+// }
+// return {
+// get manifest() {
+// if (!done) {
+// throw new Error('Cannot access manifest before all files are generated');
+// }
+// return parent.manifest;
+// },
+// files: files(),
+// };
+// }
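
A sketch of the ordering contract enforced by makeOffline above (the files generator must be fully consumed before the rewritten manifest is readable); the deep import path is an assumption, since makeOffline is not re-exported from the esm index:

import { makeOffline } from '@expofp/loader/dist/esm/offline/makeOffline.js';

const result = await makeOffline({ /* placeholder manifest object */ });
for await (const file of result.files) {
  if ('url' in file) {
    // remote asset to fetch and store at file.targetFilePath
  } else if ('data' in file) {
    // resolved JSON to write at file.targetFilePath
  }
}
// Reading result.manifest before the loop finishes throws
// 'Iterate over files before getting manifest'.
console.log(result.manifest);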