@veolab/discoverylab 1.3.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +7 -0
- package/dist/templates/bundle/161.bundle.js +4462 -0
- package/dist/templates/bundle/161.bundle.js.map +1 -0
- package/dist/templates/bundle/208.bundle.js +3479 -0
- package/dist/templates/bundle/208.bundle.js.map +1 -0
- package/dist/templates/bundle/304.bundle.js +11 -0
- package/dist/templates/bundle/304.bundle.js.map +1 -0
- package/dist/templates/bundle/45.bundle.js +93932 -0
- package/dist/templates/bundle/45.bundle.js.map +1 -0
- package/dist/templates/bundle/624.bundle.js +205 -0
- package/dist/templates/bundle/624.bundle.js.map +1 -0
- package/dist/templates/bundle/63.bundle.js +11 -0
- package/dist/templates/bundle/63.bundle.js.map +1 -0
- package/dist/templates/bundle/69.bundle.js +14678 -0
- package/dist/templates/bundle/69.bundle.js.map +1 -0
- package/dist/templates/bundle/761.bundle.js +64 -0
- package/dist/templates/bundle/761.bundle.js.map +1 -0
- package/dist/templates/bundle/872.bundle.js +451 -0
- package/dist/templates/bundle/872.bundle.js.map +1 -0
- package/dist/templates/bundle/892.bundle.js +11 -0
- package/dist/templates/bundle/892.bundle.js.map +1 -0
- package/dist/templates/bundle/997.bundle.js +258 -0
- package/dist/templates/bundle/997.bundle.js.map +1 -0
- package/dist/templates/bundle/bundle.js +40546 -0
- package/dist/templates/bundle/bundle.js.map +1 -0
- package/dist/templates/bundle/favicon.ico +0 -0
- package/dist/templates/bundle/index.html +49 -0
- package/dist/templates/bundle/public/mockup-android-google-pixel-9-pro.png +0 -0
- package/dist/templates/bundle/public/mockup-android.png +0 -0
- package/dist/templates/bundle/public/mockup-ios-iphone-17-pro.png +0 -0
- package/dist/templates/bundle/source-map-helper.wasm +0 -0
- package/dist/templates/manifest.json +25 -0
- package/package.json +2 -2
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bundle.js","mappings":";;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;;;;ACtoKA;AAjGA;AAIA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAgBA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AArCA;AAsCA;AACA;AACA;AAGA;AACA;AACA;AAAA;AAIA;AACA;AAAA;AACA;AAEA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAEA;AAAA;AAAA;AACA;AAIA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAAA;AAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAKA;AAAA;AAAA;AAxBA;AAyBA;AAEA;AAAA;AACA;AAAA;AAAA;AAEA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAAA;AAAA;AAGA;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;;ACzCA;AApJA;AACA;AAEA;AAYA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAIA;AAAA;AACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;AAAA;AAAA;AACA;AAEA;AACA;AACA;AAGA;AACA;AACA;AAAA;AAIA;AACA;AAAA;AAGA;AAEA;AACA;AACA;AACA;AACA;AACA;AAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AAAA;AAIA;AACA;AACA;AAEA;AACA;AACA;AAAA;AAEA;AAAA;AAEA;AAAA;AAEA;AACA;AAAA;AAIA;AACA;AAAA;AAGA;AACA;AAEA;AAEA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AAIA;AACA;AAEA;AACA;AAGA;AAAA;AACA;AACA;AACA;AACA;AAGA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AAEA;AAIA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AAEA;AAEA;AAAA;AAGA;;;;AC3KA;AAAA;AACA;AACA;AACA;AAEA;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;;AC7QA;AAvCA;AACA;AACA;AAEA;AAYA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AAAA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAGA;;;;ACtBA;AA/BA;AAYA;AAEA;AACA;AACA;AACA;AAEA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAGA;AAEA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAGA;AAEA;AAAA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAAA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AAAA;AAAA;AACA;AAKA;AAAA;AAAA;AAGA;AAEA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAAA;AAGA;AACA;AACA;AACA;AAAA;AAIA;AACA;AACA;AACA;;;;ACnHA;AAjFA;AAEA;AACA;AACA;AACA;AACA;AACA;AAeA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAGA;AAAA;AACA;A
ACA;AACA;AACA;AAEA;AACA;AACA;AAGA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAGA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAGA;AAEA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAEA;AAKA;AAAA;AAAA;AACA;AACA;AACA;AArHA;AAuHA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AAAA;AACA;AAAA;AAAA;AAGA;AAAA;AAAA;AACA;AAKA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAAA;AACA;AAIA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AAAA;AACA;AAAA;AAAA;AAGA;;;;ACrLA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;;;;ACuGA;AA3GA;AAEA;AAcA;AAAA;AACA;AAEA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAEA;AAAA;AAGA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AACA;AAAA;AAEA;AAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAGA;AAAA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AAAA;AAGA;;;AChKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;AC1IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;ACxIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;ACxKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;;AC+GA;AA9OA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AAmBA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAGA;AAGA;AACA;AACA;AAGA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AAAA;AACA;AACA;AACA;AAEA;AAGA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAKA;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAAA;AAEA;AAAA;AAIA;AAGA;AACA;AAGA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AAGA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AAEA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAEA;AAGA;AAQA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AAGA;AACA;AACA;AACA;AAAA;AAEA;AAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AAGA;AAGA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAAA;AAAA;AAEA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAlBA;AAmBA;AAEA;AAAA;AACA;AAAA;AAAA;AAIA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AAEA;AAAA;AAKA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AACA;AACA;AACA;AA/TA;AAiUA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AAAA;AACA;AAAA;AAAA;AAEA;AAAA;AACA;AAAA;AAAA;AAEA;AAIA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AAAA;AACA;AAIA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAIA;;;;AC3QA;AA1GA;AACA;AACA;AAsBA;AAEA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AAEA;AACA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAIA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAGA;;;;ACnLA;AACA;AAEA;;;;;;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;AC7IA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjUA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AAAA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AAAA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AAAA;;;;;;;;ACJA;;;;;;;;;;;;;;ACGA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;;;;;;;;;;;;;;;;;;AC/VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;ACVA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;ACpEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC17EA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;ACvzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC/0BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACjkFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AC7mEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AC/RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACnwBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACpRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AC3fA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC3rDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;ACjiBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;AC5TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;AC7KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACvEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;ACzqBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;ACttDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AChMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;;;;;;;;;;;;;;ACtnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;ACpMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;ACjPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;ACh9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;ACxqCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1cA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAAA;AACA;AAAA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AA8CA;;;;;;;;;;;;;;;ACl/QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;;;;;;;ACxrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACRA;AACA;AACA;AACA;AACA;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;;;;;ACPA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AErFA;AACA;AACA;AACA;AACA;AACA;AACA","sources":["./node_modules/@remotion/media/dist/esm/index.mjs","./src/shared/TerminalWindowDark.tsx","./node_modules/@remotion/google-fonts/dist/esm/FiraCode.mjs","./src/shared/CodeTypewriter.tsx","./src/utils/springs.ts","./node_modules/@remotion/google-fonts/dist/esm/Inter.mjs","./src/shared/FadeInText.tsx","./src/shared/DeviceFrame.tsx","./src/Studio/index.tsx","./src/utils/colors.ts","./src/shared/AnimatedCursor.tsx","./node_modules/@remotion/google-fonts/dist/esm/Neonderthaw.mjs","./node_modules/@remotion/google-fonts/dist/esm/BowlbyOneSC.mjs","./node_modules/@remotion/google-fonts/dist/esm/BricolageGrotesque.mjs","./node_modules/@remotion/google-fonts/dist/esm/MiltonianTattoo.mjs","./src/Showcase/index.tsx","./src/Root.tsx","./src/index.ts","./node_modules/@remotion/bundler/dist/setup-environment.js","./node_modules/@remotion/bundler/react-shim.js","./node_modules/react-dom/cjs/react-dom.production.min.js","./node_modules/react-dom/client.js","./node_modules/react-dom/index.js","./node_modules/react/cjs/react-jsx-runtime.production.min.js","./node_modules/react/cjs/react.production.min.js","./node_modules/react/index.js","./node_modules/r
eact/jsx-runtime.js","./node_modules/scheduler/cjs/scheduler.production.min.js","./node_modules/scheduler/index.js","ignored|/Users/andersonmelo/work/ander.ai/veolab/discoverylab-templates/node_modules/mediabunny/dist/modules/src|./node.js","./node_modules/@remotion/studio/dist/esm/renderEntry.mjs","./node_modules/mediabunny/dist/modules/shared/aac-misc.js","./node_modules/mediabunny/dist/modules/shared/ac3-misc.js","./node_modules/mediabunny/dist/modules/shared/bitstream.js","./node_modules/mediabunny/dist/modules/shared/mp3-misc.js","./node_modules/mediabunny/dist/modules/src/adts/adts-reader.js","./node_modules/mediabunny/dist/modules/src/codec-data.js","./node_modules/mediabunny/dist/modules/src/codec.js","./node_modules/mediabunny/dist/modules/src/custom-coder.js","./node_modules/mediabunny/dist/modules/src/demuxer.js","./node_modules/mediabunny/dist/modules/src/flac/flac-misc.js","./node_modules/mediabunny/dist/modules/src/id3.js","./node_modules/mediabunny/dist/modules/src/isobmff/isobmff-demuxer.js","./node_modules/mediabunny/dist/modules/src/matroska/matroska-demuxer.js","./node_modules/mediabunny/dist/modules/src/mp3/mp3-reader.js","./node_modules/mediabunny/dist/modules/src/mp3/mp3-demuxer.js","./node_modules/mediabunny/dist/modules/src/ogg/ogg-demuxer.js","./node_modules/mediabunny/dist/modules/src/adts/adts-demuxer.js","./node_modules/mediabunny/dist/modules/src/flac/flac-demuxer.js","./node_modules/mediabunny/dist/modules/src/mpeg-ts/mpeg-ts-demuxer.js","./node_modules/mediabunny/dist/modules/src/input-format.js","./node_modules/mediabunny/dist/modules/src/input-track.js","./node_modules/mediabunny/dist/modules/src/input.js","./node_modules/mediabunny/dist/modules/src/isobmff/isobmff-misc.js","./node_modules/mediabunny/dist/modules/src/isobmff/isobmff-reader.js","./node_modules/mediabunny/dist/modules/src/matroska/ebml.js","./node_modules/mediabunny/dist/modules/src/matroska/matroska-misc.js","./node_modules/mediabunny/dist/modules/src/media-sink.js","
./node_modules/mediabunny/dist/modules/src/metadata.js","./node_modules/mediabunny/dist/modules/src/misc.js","./node_modules/mediabunny/dist/modules/src/mpeg-ts/mpeg-ts-misc.js","./node_modules/mediabunny/dist/modules/src/ogg/ogg-misc.js","./node_modules/mediabunny/dist/modules/src/ogg/ogg-reader.js","./node_modules/mediabunny/dist/modules/src/packet.js","./node_modules/mediabunny/dist/modules/src/pcm.js","./node_modules/mediabunny/dist/modules/src/reader.js","./node_modules/mediabunny/dist/modules/src/sample.js","./node_modules/mediabunny/dist/modules/src/source.js","./node_modules/mediabunny/dist/modules/src/wave/wave-demuxer.js","./node_modules/remotion/dist/esm/index.mjs","./node_modules/remotion/dist/esm/no-react.mjs","webpack/bootstrap","webpack/runtime/compat get default export","webpack/runtime/create fake namespace object","webpack/runtime/define property getters","webpack/runtime/ensure chunk","webpack/runtime/get javascript chunk filename","webpack/runtime/global","webpack/runtime/hasOwnProperty shorthand","webpack/runtime/load script","webpack/runtime/make namespace object","webpack/runtime/publicPath","webpack/runtime/jsonp chunk loading","webpack/before-startup","webpack/startup","webpack/after-startup"],"sourcesContent":["var __dispose = Symbol.dispose || /* @__PURE__ */ Symbol.for(\"Symbol.dispose\");\nvar __asyncDispose = Symbol.asyncDispose || /* @__PURE__ */ Symbol.for(\"Symbol.asyncDispose\");\nvar __using = (stack, value, async) => {\n if (value != null) {\n if (typeof value !== \"object\" && typeof value !== \"function\")\n throw TypeError('Object expected to be assigned to \"using\" declaration');\n var dispose;\n if (async)\n dispose = value[__asyncDispose];\n if (dispose === undefined)\n dispose = value[__dispose];\n if (typeof dispose !== \"function\")\n throw TypeError(\"Object not disposable\");\n stack.push([async, dispose, value]);\n } else if (async) {\n stack.push([async]);\n }\n return value;\n};\nvar __callDispose = (stack, error, 
hasError) => {\n var E = typeof SuppressedError === \"function\" ? SuppressedError : function(e, s, m, _) {\n return _ = Error(m), _.name = \"SuppressedError\", _.error = e, _.suppressed = s, _;\n }, fail = (e) => error = hasError ? new E(e, error, \"An error was suppressed during disposal\") : (hasError = true, e), next = (it) => {\n while (it = stack.pop()) {\n try {\n var result = it[1] && it[1].call(it[2]);\n if (it[0])\n return Promise.resolve(result).then(next, (e) => (fail(e), next()));\n } catch (e) {\n fail(e);\n }\n }\n if (hasError)\n throw error;\n };\n return next();\n};\n\n// src/audio/audio.tsx\nimport { Internals as Internals19, useRemotionEnvironment as useRemotionEnvironment2 } from \"remotion\";\n\n// src/audio/audio-for-preview.tsx\nimport { useContext as useContext3, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from \"react\";\nimport {\n Internals as Internals10,\n Audio as RemotionAudio,\n useBufferState,\n useCurrentFrame as useCurrentFrame2,\n useVideoConfig as useVideoConfig2\n} from \"remotion\";\n\n// src/get-time-in-seconds.ts\nimport { Internals } from \"remotion\";\nvar getTimeInSeconds = ({\n loop,\n mediaDurationInSeconds,\n unloopedTimeInSeconds,\n src,\n trimAfter,\n trimBefore,\n fps,\n playbackRate,\n ifNoMediaDuration\n}) => {\n if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === \"fail\") {\n throw new Error(`Could not determine duration of ${src}, but \"loop\" was set.`);\n }\n const loopDuration = loop ? Internals.calculateMediaDuration({\n trimAfter,\n mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,\n playbackRate: 1,\n trimBefore\n }) / fps : Infinity;\n const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;\n if ((trimAfter ?? null) !== null && !loop) {\n const time = (trimAfter - (trimBefore ?? 0)) / fps;\n if (timeInSeconds >= time) {\n return null;\n }\n }\n return timeInSeconds + (trimBefore ?? 
0) / fps;\n};\nvar calculateEndTime = ({\n mediaDurationInSeconds,\n ifNoMediaDuration,\n src,\n trimAfter,\n trimBefore,\n fps\n}) => {\n if (mediaDurationInSeconds === null && ifNoMediaDuration === \"fail\") {\n throw new Error(`Could not determine duration of ${src}, but \"loop\" was set.`);\n }\n const mediaDuration = Internals.calculateMediaDuration({\n trimAfter,\n mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,\n playbackRate: 1,\n trimBefore\n }) / fps;\n return mediaDuration + (trimBefore ?? 0) / fps;\n};\n\n// src/media-player.ts\nimport { ALL_FORMATS, Input, UrlSource } from \"mediabunny\";\nimport { Internals as Internals6 } from \"remotion\";\n\n// src/audio-iterator-manager.ts\nimport { AudioBufferSink, InputDisposedError } from \"mediabunny\";\nimport { Internals as Internals4 } from \"remotion\";\n\n// src/audio/audio-preview-iterator.ts\nimport { Internals as Internals3 } from \"remotion\";\n\n// src/helpers/round-to-4-digits.ts\nvar roundTo4Digits = (timestamp) => {\n return Math.round(timestamp * 1000) / 1000;\n};\n\n// src/set-global-time-anchor.ts\nimport { Internals as Internals2 } from \"remotion\";\nvar ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT = 0.1;\nvar setGlobalTimeAnchor = ({\n audioContext,\n audioSyncAnchor,\n absoluteTimeInSeconds,\n globalPlaybackRate,\n debugAudioScheduling,\n logLevel\n}) => {\n const newAnchor = audioContext.currentTime - absoluteTimeInSeconds / globalPlaybackRate;\n const shift = (newAnchor - audioSyncAnchor.value) * globalPlaybackRate;\n if (Math.abs(shift) < ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT) {\n return;\n }\n if (debugAudioScheduling) {\n Internals2.Log.info({ logLevel, tag: \"audio-scheduling\" }, \"Anchor changed from %s to %s with shift %s\", audioSyncAnchor.value, newAnchor, shift);\n }\n audioSyncAnchor.value = newAnchor;\n};\n\n// src/audio/audio-preview-iterator.ts\nvar makeAudioIterator = ({\n startFromSecond,\n maximumTimestamp,\n cache,\n debugAudioScheduling\n}) => 
{\n let destroyed = false;\n const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond, maximumTimestamp);\n const queuedAudioNodes = [];\n const audioChunksForAfterResuming = [];\n let mostRecentTimestamp = -Infinity;\n let pendingNext = null;\n const cleanupAudioQueue = (audioContext) => {\n for (const node of queuedAudioNodes) {\n try {\n const isAlreadyPlaying = node.scheduledTime - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT < audioContext.audioContext.currentTime;\n const wasScheduledForThisAnchor = node.scheduledAtAnchor === audioContext.audioSyncAnchor.value;\n if (isAlreadyPlaying && wasScheduledForThisAnchor) {\n continue;\n }\n if (debugAudioScheduling) {\n const currentlyHearing = audioContext.audioContext.getOutputTimestamp().contextTime;\n const nodeEndTime = node.scheduledTime + node.buffer.duration / node.playbackRate;\n Internals3.Log.info({ logLevel: \"trace\", tag: \"audio-scheduling\" }, `Stopping node ${node.timestamp.toFixed(3)}, currently hearing = ${currentlyHearing.toFixed(3)} currentTime = ${audioContext.audioContext.currentTime.toFixed(3)} nodeEndTime = ${nodeEndTime.toFixed(3)} scheduledTime = ${node.scheduledTime.toFixed(3)}`);\n }\n node.node.stop();\n } catch {}\n }\n queuedAudioNodes.length = 0;\n };\n const getNextOrNullIfNotAvailable = async () => {\n let next = pendingNext;\n if (!next) {\n next = iterator.next();\n }\n pendingNext = null;\n const result = await Promise.race([\n next,\n new Promise((resolve) => {\n Promise.resolve().then(() => resolve());\n })\n ]);\n if (!result) {\n pendingNext = next;\n return {\n type: \"need-to-wait-for-it\",\n waitPromise: async () => {\n const res = await next;\n return res.value;\n }\n };\n }\n if (result.value) {\n mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);\n pendingNext = iterator.next();\n return {\n type: \"got-buffer\",\n buffer: result.value\n };\n }\n return {\n type: \"got-end\",\n mostRecentTimestamp\n };\n };\n const 
tryToSatisfySeek = async (time, onBufferScheduled) => {\n if (time < startFromSecond) {\n return {\n type: \"not-satisfied\",\n reason: `time requested is before the start of the iterator`\n };\n }\n while (true) {\n const buffer = await getNextOrNullIfNotAvailable();\n if (buffer.type === \"need-to-wait-for-it\") {\n return {\n type: \"not-satisfied\",\n reason: \"iterator did not have buffer ready\"\n };\n }\n if (buffer.type === \"got-end\") {\n if (time >= mostRecentTimestamp) {\n return {\n type: \"ended\"\n };\n }\n return {\n type: \"not-satisfied\",\n reason: `iterator ended before the requested time`\n };\n }\n if (buffer.type === \"got-buffer\") {\n const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);\n const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);\n const timestamp = roundTo4Digits(time);\n if (timestamp < bufferTimestamp) {\n return {\n type: \"not-satisfied\",\n reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${timestamp}`\n };\n }\n if (bufferTimestamp <= timestamp && bufferEndTimestamp >= timestamp) {\n onBufferScheduled(buffer.buffer);\n return {\n type: \"satisfied\"\n };\n }\n onBufferScheduled(buffer.buffer);\n continue;\n }\n throw new Error(\"Unreachable\");\n }\n };\n const bufferAsFarAsPossible = async (onBufferScheduled, maxTimestamp) => {\n while (true) {\n if (mostRecentTimestamp >= maxTimestamp) {\n return { type: \"max-reached\" };\n }\n const buffer = await getNextOrNullIfNotAvailable();\n if (buffer.type === \"need-to-wait-for-it\") {\n return { type: \"waiting\" };\n }\n if (buffer.type === \"got-end\") {\n return { type: \"ended\" };\n }\n if (buffer.type === \"got-buffer\") {\n onBufferScheduled(buffer.buffer);\n continue;\n }\n throw new Error(\"Unreachable\");\n }\n };\n const removeAndReturnAllQueuedAudioNodes = () => {\n const nodes = queuedAudioNodes.slice();\n for (const node of nodes) {\n try {\n 
node.node.stop();\n } catch {}\n }\n queuedAudioNodes.length = 0;\n return nodes;\n };\n const addChunkForAfterResuming = (buffer, timestamp) => {\n audioChunksForAfterResuming.push({\n buffer,\n timestamp\n });\n };\n const moveQueuedChunksToPauseQueue = () => {\n const toQueue = removeAndReturnAllQueuedAudioNodes();\n for (const chunk of toQueue) {\n addChunkForAfterResuming(chunk.buffer, chunk.timestamp);\n }\n if (debugAudioScheduling && toQueue.length > 0) {\n Internals3.Log.trace({ logLevel: \"trace\", tag: \"audio-scheduling\" }, `Moved ${toQueue.length} ${toQueue.length === 1 ? \"chunk\" : \"chunks\"} to pause queue (${toQueue[0].timestamp.toFixed(3)}-${toQueue[toQueue.length - 1].timestamp + toQueue[toQueue.length - 1].buffer.duration.toFixed(3)})`);\n }\n };\n const getNumberOfChunksAfterResuming = () => {\n return audioChunksForAfterResuming.length;\n };\n return {\n destroy: (audioContext) => {\n cleanupAudioQueue(audioContext);\n destroyed = true;\n iterator.return().catch(() => {\n return;\n });\n audioChunksForAfterResuming.length = 0;\n },\n getNext: async () => {\n const next = await iterator.next();\n if (next.value) {\n mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);\n }\n return next;\n },\n isDestroyed: () => {\n return destroyed;\n },\n addQueuedAudioNode: ({\n node,\n timestamp,\n buffer,\n scheduledTime,\n playbackRate,\n scheduledAtAnchor\n }) => {\n queuedAudioNodes.push({\n node,\n timestamp,\n buffer,\n scheduledTime,\n playbackRate,\n scheduledAtAnchor\n });\n },\n removeQueuedAudioNode: (node) => {\n const index = queuedAudioNodes.findIndex((n) => n.node === node);\n if (index !== -1) {\n queuedAudioNodes.splice(index, 1);\n }\n },\n getAndClearAudioChunksForAfterResuming: () => {\n const chunks = audioChunksForAfterResuming.slice();\n audioChunksForAfterResuming.length = 0;\n return chunks;\n },\n getQueuedPeriod: () => {\n let until = -Infinity;\n let from = Infinity;\n for (const node 
of queuedAudioNodes) {\n until = Math.max(until, node.timestamp + node.buffer.duration);\n from = Math.min(from, node.timestamp);\n }\n for (const chunk of audioChunksForAfterResuming) {\n until = Math.max(until, chunk.timestamp + chunk.buffer.duration);\n from = Math.min(from, chunk.timestamp);\n }\n if (!Number.isFinite(from) || !Number.isFinite(until)) {\n return null;\n }\n return {\n from,\n until\n };\n },\n tryToSatisfySeek,\n bufferAsFarAsPossible,\n addChunkForAfterResuming,\n moveQueuedChunksToPauseQueue,\n getNumberOfChunksAfterResuming\n };\n};\nvar isAlreadyQueued = (time, queuedPeriod) => {\n if (!queuedPeriod) {\n return false;\n }\n return time >= queuedPeriod.from && time < queuedPeriod.until;\n};\n\n// src/make-iterator-with-priming.ts\nvar AUDIO_PRIMING_SECONDS = 0.5;\nvar PREDECODE_AHEAD_SECONDS = 8;\nfunction makePredecodingIterator(inner) {\n const buffer = [];\n let consumerEndTime = 0;\n let innerDone = false;\n let returned = false;\n let fetching = false;\n let waiter = null;\n const prefetch = () => {\n if (fetching || returned || innerDone) {\n return;\n }\n const lastBuffered = buffer.length > 0 ? buffer[buffer.length - 1] : null;\n const bufferedEndTime = lastBuffered ? 
lastBuffered.timestamp + lastBuffered.duration : consumerEndTime;\n if (bufferedEndTime >= consumerEndTime + PREDECODE_AHEAD_SECONDS) {\n return;\n }\n fetching = true;\n inner.next().then((result) => {\n fetching = false;\n if (returned) {\n return;\n }\n if (result.done) {\n innerDone = true;\n if (waiter) {\n const w = waiter;\n waiter = null;\n w({ value: undefined, done: true });\n }\n return;\n }\n if (waiter) {\n const w = waiter;\n waiter = null;\n const buf = result.value;\n consumerEndTime = buf.timestamp + buf.duration;\n w({ value: buf, done: false });\n prefetch();\n return;\n }\n buffer.push(result.value);\n prefetch();\n }, () => {\n fetching = false;\n innerDone = true;\n if (waiter) {\n const w = waiter;\n waiter = null;\n w({ value: undefined, done: true });\n }\n });\n };\n prefetch();\n const _return = () => {\n returned = true;\n buffer.length = 0;\n if (waiter) {\n const w = waiter;\n waiter = null;\n w({ value: undefined, done: true });\n }\n inner.return(undefined);\n return Promise.resolve({ value: undefined, done: true });\n };\n const iterator = {\n next() {\n if (buffer.length > 0) {\n const buf = buffer.shift();\n consumerEndTime = buf.timestamp + buf.duration;\n prefetch();\n return Promise.resolve({ value: buf, done: false });\n }\n if (innerDone) {\n return Promise.resolve({\n value: undefined,\n done: true\n });\n }\n return new Promise((resolve) => {\n waiter = resolve;\n prefetch();\n });\n },\n return: _return,\n throw(e) {\n returned = true;\n buffer.length = 0;\n return inner.throw(e);\n },\n [Symbol.asyncIterator]() {\n return iterator;\n }\n };\n return iterator;\n}\nasync function* makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp) {\n const primingStart = Math.max(0, timeToSeek - AUDIO_PRIMING_SECONDS);\n const iterator = audioSink.buffers(primingStart, maximumTimestamp);\n for await (const buffer of iterator) {\n if (buffer.timestamp + buffer.duration <= timeToSeek) {\n continue;\n }\n yield buffer;\n 
}\n}\nvar makeIteratorWithPriming = ({\n audioSink,\n timeToSeek,\n maximumTimestamp\n}) => {\n return makePredecodingIterator(makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp));\n};\n\n// src/prewarm-iterator-for-looping.ts\nvar makePrewarmedVideoIteratorCache = (videoSink) => {\n const prewarmedVideoIterators = new Map;\n const prewarmIteratorForLooping = ({ timeToSeek }) => {\n if (!prewarmedVideoIterators.has(timeToSeek)) {\n prewarmedVideoIterators.set(timeToSeek, videoSink.canvases(timeToSeek));\n }\n };\n const makeIteratorOrUsePrewarmed = (timeToSeek) => {\n const prewarmedIterator = prewarmedVideoIterators.get(timeToSeek);\n if (prewarmedIterator) {\n prewarmedVideoIterators.delete(timeToSeek);\n return prewarmedIterator;\n }\n const iterator = videoSink.canvases(timeToSeek);\n return iterator;\n };\n const destroy = () => {\n for (const iterator of prewarmedVideoIterators.values()) {\n iterator.return();\n }\n prewarmedVideoIterators.clear();\n };\n return {\n prewarmIteratorForLooping,\n makeIteratorOrUsePrewarmed,\n destroy\n };\n};\nvar makeKey = (timeToSeek, maximumTimestamp) => {\n return `${timeToSeek}-${maximumTimestamp}`;\n};\nvar makePrewarmedAudioIteratorCache = (audioSink) => {\n const prewarmedAudioIterators = new Map;\n const prewarmIteratorForLooping = ({\n timeToSeek,\n maximumTimestamp\n }) => {\n if (!prewarmedAudioIterators.has(makeKey(timeToSeek, maximumTimestamp))) {\n prewarmedAudioIterators.set(makeKey(timeToSeek, maximumTimestamp), makeIteratorWithPriming({ audioSink, timeToSeek, maximumTimestamp }));\n }\n };\n const makeIteratorOrUsePrewarmed = (timeToSeek, maximumTimestamp) => {\n const prewarmedIterator = prewarmedAudioIterators.get(makeKey(timeToSeek, maximumTimestamp));\n if (prewarmedIterator) {\n prewarmedAudioIterators.delete(makeKey(timeToSeek, maximumTimestamp));\n return prewarmedIterator;\n }\n const iterator = makeIteratorWithPriming({\n audioSink,\n timeToSeek,\n maximumTimestamp\n });\n return 
iterator;\n };\n const destroy = () => {\n for (const iterator of prewarmedAudioIterators.values()) {\n iterator.return();\n }\n prewarmedAudioIterators.clear();\n };\n return {\n prewarmIteratorForLooping,\n makeIteratorOrUsePrewarmed,\n destroy\n };\n};\n\n// src/audio-iterator-manager.ts\nvar MAX_BUFFER_AHEAD_SECONDS = 8;\nvar audioIteratorManager = ({\n audioTrack,\n delayPlaybackHandleIfNotPremounting,\n sharedAudioContext,\n getIsLooping,\n getEndTime,\n getStartTime,\n initialMuted,\n drawDebugOverlay\n}) => {\n let muted = initialMuted;\n let currentVolume = 1;\n const gainNode = sharedAudioContext.audioContext.createGain();\n gainNode.connect(sharedAudioContext.audioContext.destination);\n const audioSink = new AudioBufferSink(audioTrack);\n const prewarmedAudioIteratorCache = makePrewarmedAudioIteratorCache(audioSink);\n let audioBufferIterator = null;\n let audioIteratorsCreated = 0;\n let currentDelayHandle = null;\n const scheduleAudioChunk = ({\n buffer,\n mediaTimestamp,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n }) => {\n if (!audioBufferIterator) {\n throw new Error(\"Audio buffer iterator not found\");\n }\n if (sharedAudioContext.audioContext.state !== \"running\") {\n throw new Error(\"Tried to schedule node while audio context is not running\");\n }\n if (muted) {\n return;\n }\n const node = sharedAudioContext.audioContext.createBufferSource();\n node.buffer = buffer;\n node.playbackRate.value = playbackRate;\n node.connect(gainNode);\n const started = scheduleAudioNode(node, mediaTimestamp);\n if (started.type === \"not-started\") {\n if (debugAudioScheduling) {\n Internals4.Log.info({ logLevel: \"trace\", tag: \"audio-scheduling\" }, \"not started, disconnected: %s %s\", mediaTimestamp.toFixed(3), buffer.duration.toFixed(3));\n }\n node.disconnect();\n return;\n }\n const iterator = audioBufferIterator;\n iterator.addQueuedAudioNode({\n node,\n timestamp: mediaTimestamp,\n buffer,\n scheduledTime: started.scheduledTime,\n 
playbackRate,\n scheduledAtAnchor: sharedAudioContext.audioSyncAnchor.value\n });\n node.onended = () => {\n setTimeout(() => {\n iterator.removeQueuedAudioNode(node);\n }, 30);\n };\n };\n const resumeScheduledAudioChunks = ({\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n }) => {\n if (muted) {\n return;\n }\n if (!audioBufferIterator) {\n return;\n }\n for (const chunk of audioBufferIterator.getAndClearAudioChunksForAfterResuming()) {\n scheduleAudioChunk({\n buffer: chunk.buffer,\n mediaTimestamp: chunk.timestamp,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n }\n };\n const onAudioChunk = ({\n getIsPlaying,\n buffer,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n }) => {\n if (muted) {\n return;\n }\n const startTime = getStartTime();\n const endTime = getEndTime();\n if (buffer.timestamp + buffer.duration <= startTime) {\n return;\n }\n if (buffer.timestamp >= endTime) {\n return;\n }\n if (getIsPlaying() && sharedAudioContext.audioContext.state === \"running\" && (sharedAudioContext.audioContext.getOutputTimestamp().contextTime ?? 
0) > 0) {\n resumeScheduledAudioChunks({\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n scheduleAudioChunk({\n buffer: buffer.buffer,\n mediaTimestamp: buffer.timestamp,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n } else {\n if (!audioBufferIterator) {\n throw new Error(\"Audio buffer iterator not found\");\n }\n if (debugAudioScheduling) {\n Internals4.Log.info({ logLevel: \"trace\", tag: \"audio-scheduling\" }, \"not ready, added to queue: %s %s\", buffer.timestamp.toFixed(3), buffer.duration.toFixed(3));\n }\n audioBufferIterator.addChunkForAfterResuming(buffer.buffer, buffer.timestamp);\n }\n drawDebugOverlay();\n };\n const startAudioIterator = async ({\n nonce,\n playbackRate,\n startFromSecond,\n getIsPlaying,\n scheduleAudioNode,\n debugAudioScheduling\n }) => {\n let __stack = [];\n try {\n if (muted) {\n return;\n }\n audioBufferIterator?.destroy(sharedAudioContext);\n const delayHandle = __using(__stack, delayPlaybackHandleIfNotPremounting(), 0);\n currentDelayHandle = delayHandle;\n const iterator = makeAudioIterator({\n startFromSecond,\n maximumTimestamp: getEndTime(),\n cache: prewarmedAudioIteratorCache,\n debugAudioScheduling\n });\n audioIteratorsCreated++;\n audioBufferIterator = iterator;\n try {\n for (let i = 0;i < 6; i++) {\n const result = await iterator.getNext();\n if (iterator.isDestroyed()) {\n return;\n }\n if (nonce.isStale()) {\n return;\n }\n if (!result.value) {\n return;\n }\n onAudioChunk({\n getIsPlaying,\n buffer: result.value,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n }\n await iterator.bufferAsFarAsPossible((buffer) => {\n if (!nonce.isStale()) {\n onAudioChunk({\n getIsPlaying,\n buffer,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n }\n }, Math.min(startFromSecond + MAX_BUFFER_AHEAD_SECONDS, getEndTime()));\n } catch (e) {\n if (e instanceof InputDisposedError) {\n return;\n }\n throw e;\n }\n } catch (_catch) {\n var _err = _catch, 
_hasErr = 1;\n } finally {\n __callDispose(__stack, _err, _hasErr);\n }\n };\n const pausePlayback = () => {\n if (!audioBufferIterator) {\n return;\n }\n audioBufferIterator.moveQueuedChunksToPauseQueue();\n };\n const seek = async ({\n newTime,\n nonce,\n playbackRate,\n getIsPlaying,\n scheduleAudioNode,\n debugAudioScheduling\n }) => {\n if (muted) {\n return;\n }\n if (getIsLooping()) {\n if (getEndTime() - newTime < 1) {\n prewarmedAudioIteratorCache.prewarmIteratorForLooping({\n timeToSeek: getStartTime(),\n maximumTimestamp: getEndTime()\n });\n }\n }\n if (!audioBufferIterator) {\n await startAudioIterator({\n nonce,\n playbackRate,\n startFromSecond: newTime,\n getIsPlaying,\n scheduleAudioNode,\n debugAudioScheduling\n });\n return;\n }\n const queuedPeriod = audioBufferIterator.getQueuedPeriod();\n const queuedPeriodMinusLatency = queuedPeriod ? {\n from: queuedPeriod.from - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT - sharedAudioContext.audioContext.baseLatency - sharedAudioContext.audioContext.outputLatency,\n until: queuedPeriod.until\n } : null;\n const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriodMinusLatency);\n if (!currentTimeIsAlreadyQueued) {\n const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, (buffer) => {\n if (!nonce.isStale()) {\n onAudioChunk({\n getIsPlaying,\n buffer,\n playbackRate,\n scheduleAudioNode,\n debugAudioScheduling\n });\n }\n });\n if (nonce.isStale()) {\n return;\n }\n if (audioSatisfyResult.type === \"ended\") {\n return;\n }\n if (audioSatisfyResult.type === \"not-satisfied\") {\n await startAudioIterator({\n nonce,\n playbackRate,\n startFromSecond: newTime,\n getIsPlaying,\n scheduleAudioNode,\n debugAudioScheduling\n });\n return;\n }\n if (audioSatisfyResult.type === \"satisfied\") {}\n }\n await audioBufferIterator.bufferAsFarAsPossible((buffer) => {\n if (!nonce.isStale()) {\n onAudioChunk({\n getIsPlaying,\n buffer,\n playbackRate,\n scheduleAudioNode,\n 
debugAudioScheduling\n });\n }\n }, Math.min(newTime + MAX_BUFFER_AHEAD_SECONDS, getEndTime()));\n };\n return {\n startAudioIterator,\n resumeScheduledAudioChunks,\n pausePlayback,\n getAudioBufferIterator: () => audioBufferIterator,\n destroyIterator: () => {\n prewarmedAudioIteratorCache.destroy();\n audioBufferIterator?.destroy(sharedAudioContext);\n audioBufferIterator = null;\n if (currentDelayHandle) {\n currentDelayHandle.unblock();\n currentDelayHandle = null;\n }\n },\n seek,\n getAudioIteratorsCreated: () => audioIteratorsCreated,\n setMuted: (newMuted) => {\n muted = newMuted;\n gainNode.gain.value = muted ? 0 : currentVolume;\n },\n setVolume: (volume) => {\n currentVolume = Math.max(0, volume);\n gainNode.gain.value = muted ? 0 : currentVolume;\n },\n scheduleAudioChunk\n };\n};\n\n// src/debug-overlay/preview-overlay.ts\nvar drawPreviewOverlay = ({\n context,\n audioTime,\n audioContextState,\n audioSyncAnchor,\n playing,\n audioIteratorManager: audioIteratorManager2,\n videoIteratorManager,\n playbackRate\n}) => {\n const anchorValue = audioSyncAnchor?.value ?? 0;\n const lines = [\n \"Debug overlay\",\n `Video iterators created: ${videoIteratorManager?.getVideoIteratorsCreated()}`,\n `Audio iterators created: ${audioIteratorManager2?.getAudioIteratorsCreated()}`,\n `Frames rendered: ${videoIteratorManager?.getFramesRendered()}`,\n `Audio context state: ${audioContextState}`,\n audioTime ? `Audio time: ${((audioTime - anchorValue) * playbackRate).toFixed(3)}s` : null\n ].filter(Boolean);\n if (audioIteratorManager2) {\n const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod();\n if (queuedPeriod) {\n const aheadText = audioTime ? 
` (${(queuedPeriod.until - (audioTime - anchorValue) * playbackRate).toFixed(3)}s ahead)` : \"\";\n lines.push(`Audio queued until ${queuedPeriod.until.toFixed(3)}s${aheadText}`);\n }\n lines.push(`Playing: ${playing}`);\n }\n const lineHeight = 30;\n const boxPaddingX = 10;\n const boxPaddingY = 10;\n const boxLeft = 20;\n const boxTop = 20;\n const boxWidth = 600;\n const boxHeight = lines.length * lineHeight + 2 * boxPaddingY;\n context.fillStyle = \"rgba(0, 0, 0, 1)\";\n context.fillRect(boxLeft, boxTop, boxWidth, boxHeight);\n context.fillStyle = \"white\";\n context.font = \"24px sans-serif\";\n context.textBaseline = \"top\";\n for (let i = 0;i < lines.length; i++) {\n context.fillText(lines[i], boxLeft + boxPaddingX, boxTop + boxPaddingY + i * lineHeight);\n }\n};\n\n// src/is-type-of-error.ts\nfunction isNetworkError(error) {\n if (error.message.includes(\"Failed to fetch\") || error.message.includes(\"Load failed\") || error.message.includes(\"NetworkError when attempting to fetch resource\")) {\n return true;\n }\n return false;\n}\nfunction isUnsupportedConfigurationError(error) {\n return error.message.includes(\"Unsupported configuration\");\n}\n\n// src/nonce-manager.ts\nvar makeNonceManager = () => {\n let nonce = 0;\n const createAsyncOperation = () => {\n nonce++;\n const currentNonce = nonce;\n return {\n isStale: () => nonce !== currentNonce\n };\n };\n return {\n createAsyncOperation\n };\n};\n\n// src/video-iterator-manager.ts\nimport { CanvasSink } from \"mediabunny\";\nimport { Internals as Internals5 } from \"remotion\";\n\n// src/video/video-preview-iterator.ts\nvar createVideoIterator = async (timeToSeek, cache) => {\n let destroyed = false;\n const iterator = cache.makeIteratorOrUsePrewarmed(timeToSeek);\n let iteratorEnded = false;\n const initialFrame = (await iterator.next())?.value ?? 
null;\n let lastReturnedFrame = initialFrame;\n const getNextOrNullIfNotAvailable = async () => {\n const next = iterator.next();\n const result = await Promise.race([\n next,\n new Promise((resolve) => {\n Promise.resolve().then(() => resolve());\n })\n ]);\n if (!result) {\n return {\n type: \"need-to-wait-for-it\",\n waitPromise: async () => {\n const res = await next;\n if (res.value) {\n lastReturnedFrame = res.value;\n } else {\n iteratorEnded = true;\n }\n return res.value;\n }\n };\n }\n if (result.value) {\n lastReturnedFrame = result.value;\n } else {\n iteratorEnded = true;\n }\n return {\n type: \"got-frame-or-end\",\n frame: result.value ?? null\n };\n };\n const destroy = () => {\n destroyed = true;\n lastReturnedFrame = null;\n iterator.return().catch(() => {\n return;\n });\n };\n const tryToSatisfySeek = async (time) => {\n if (lastReturnedFrame) {\n const frameTimestamp = roundTo4Digits(lastReturnedFrame.timestamp);\n if (roundTo4Digits(time) < frameTimestamp) {\n const lastFrameWasInitialFrame = lastReturnedFrame === initialFrame;\n const firstFrameDoesSatisfy = lastFrameWasInitialFrame && roundTo4Digits(time) >= roundTo4Digits(timeToSeek);\n if (firstFrameDoesSatisfy) {\n return {\n type: \"satisfied\",\n frame: lastReturnedFrame\n };\n }\n return {\n type: \"not-satisfied\",\n reason: `iterator is too far, most recently returned ${frameTimestamp}`\n };\n }\n const frameEndTimestamp = roundTo4Digits(lastReturnedFrame.timestamp + lastReturnedFrame.duration);\n const timestamp = roundTo4Digits(time);\n if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {\n return {\n type: \"satisfied\",\n frame: lastReturnedFrame\n };\n }\n }\n if (iteratorEnded) {\n if (lastReturnedFrame) {\n return {\n type: \"satisfied\",\n frame: lastReturnedFrame\n };\n }\n return {\n type: \"not-satisfied\",\n reason: \"iterator ended\"\n };\n }\n while (true) {\n const frame = await getNextOrNullIfNotAvailable();\n if (frame.type === \"need-to-wait-for-it\") 
{\n return {\n type: \"not-satisfied\",\n reason: \"iterator did not have frame ready\"\n };\n }\n if (frame.type === \"got-frame-or-end\") {\n if (frame.frame === null) {\n iteratorEnded = true;\n if (lastReturnedFrame) {\n return {\n type: \"satisfied\",\n frame: lastReturnedFrame\n };\n }\n return {\n type: \"not-satisfied\",\n reason: \"iterator ended and did not have frame ready\"\n };\n }\n const frameTimestamp = roundTo4Digits(frame.frame.timestamp);\n const frameEndTimestamp = roundTo4Digits(frame.frame.timestamp + frame.frame.duration);\n const timestamp = roundTo4Digits(time);\n if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {\n return {\n type: \"satisfied\",\n frame: frame.frame\n };\n }\n continue;\n }\n throw new Error(\"Unreachable\");\n }\n };\n return {\n destroy,\n initialFrame,\n isDestroyed: () => {\n return destroyed;\n },\n tryToSatisfySeek\n };\n};\n\n// src/video-iterator-manager.ts\nvar videoIteratorManager = ({\n delayPlaybackHandleIfNotPremounting,\n canvas,\n context,\n drawDebugOverlay,\n logLevel,\n getOnVideoFrameCallback,\n videoTrack,\n getEndTime,\n getStartTime,\n getIsLooping\n}) => {\n let videoIteratorsCreated = 0;\n let videoFrameIterator = null;\n let framesRendered = 0;\n let currentDelayHandle = null;\n if (canvas) {\n canvas.width = videoTrack.displayWidth;\n canvas.height = videoTrack.displayHeight;\n }\n const canvasSink = new CanvasSink(videoTrack, {\n poolSize: 2,\n fit: \"contain\",\n alpha: true\n });\n const prewarmedVideoIteratorCache = makePrewarmedVideoIteratorCache(canvasSink);\n const drawFrame = (frame) => {\n if (context && canvas) {\n context.clearRect(0, 0, canvas.width, canvas.height);\n context.drawImage(frame.canvas, 0, 0);\n }\n framesRendered++;\n drawDebugOverlay();\n const callback = getOnVideoFrameCallback();\n if (callback) {\n callback(frame.canvas);\n }\n Internals5.Log.trace({ logLevel, tag: \"@remotion/media\" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);\n 
};\n const startVideoIterator = async (timeToSeek, nonce) => {\n let __stack = [];\n try {\n videoFrameIterator?.destroy();\n const delayHandle = __using(__stack, delayPlaybackHandleIfNotPremounting(), 0);\n currentDelayHandle = delayHandle;\n const iterator = await createVideoIterator(timeToSeek, prewarmedVideoIteratorCache);\n videoIteratorsCreated++;\n videoFrameIterator = iterator;\n if (iterator.isDestroyed()) {\n return;\n }\n if (nonce.isStale()) {\n return;\n }\n if (videoFrameIterator.isDestroyed()) {\n return;\n }\n if (!iterator.initialFrame) {\n return;\n }\n drawFrame(iterator.initialFrame);\n } catch (_catch) {\n var _err = _catch, _hasErr = 1;\n } finally {\n __callDispose(__stack, _err, _hasErr);\n }\n };\n const seek = async ({ newTime, nonce }) => {\n if (!videoFrameIterator) {\n return;\n }\n if (getIsLooping()) {\n if (getEndTime() - newTime < 1) {\n prewarmedVideoIteratorCache.prewarmIteratorForLooping({\n timeToSeek: getStartTime()\n });\n }\n }\n const videoSatisfyResult = await videoFrameIterator.tryToSatisfySeek(newTime);\n if (videoSatisfyResult.type === \"satisfied\") {\n drawFrame(videoSatisfyResult.frame);\n return;\n }\n if (nonce.isStale()) {\n return;\n }\n await startVideoIterator(newTime, nonce);\n };\n return {\n startVideoIterator,\n getVideoIteratorsCreated: () => videoIteratorsCreated,\n seek,\n destroy: () => {\n prewarmedVideoIteratorCache.destroy();\n videoFrameIterator?.destroy();\n if (context && canvas) {\n context.clearRect(0, 0, canvas.width, canvas.height);\n }\n if (currentDelayHandle) {\n currentDelayHandle.unblock();\n currentDelayHandle = null;\n }\n videoFrameIterator = null;\n },\n getVideoFrameIterator: () => videoFrameIterator,\n drawFrame,\n getFramesRendered: () => framesRendered\n };\n};\n\n// src/media-player.ts\nclass MediaPlayer {\n canvas;\n context;\n src;\n logLevel;\n playbackRate;\n globalPlaybackRate;\n audioStreamIndex;\n sharedAudioContext;\n audioIteratorManager = null;\n videoIteratorManager = 
null;\n sequenceOffset;\n playing = false;\n loop = false;\n fps;\n trimBefore;\n trimAfter;\n durationInFrames;\n totalDuration;\n debugOverlay = false;\n debugAudioScheduling = false;\n nonceManager;\n onVideoFrameCallback = null;\n initializationPromise = null;\n bufferState;\n isPremounting;\n isPostmounting;\n seekPromiseChain = Promise.resolve();\n constructor({\n canvas,\n src,\n logLevel,\n sharedAudioContext,\n loop,\n trimBefore,\n trimAfter,\n playbackRate,\n globalPlaybackRate,\n audioStreamIndex,\n fps,\n debugOverlay,\n debugAudioScheduling,\n bufferState,\n isPremounting,\n isPostmounting,\n durationInFrames,\n onVideoFrameCallback,\n playing,\n sequenceOffset\n }) {\n this.canvas = canvas ?? null;\n this.src = src;\n this.logLevel = logLevel;\n this.sharedAudioContext = sharedAudioContext;\n this.playbackRate = playbackRate;\n this.globalPlaybackRate = globalPlaybackRate;\n this.loop = loop;\n this.trimBefore = trimBefore;\n this.trimAfter = trimAfter;\n this.audioStreamIndex = audioStreamIndex ?? 
0;\n this.fps = fps;\n this.debugOverlay = debugOverlay;\n this.debugAudioScheduling = debugAudioScheduling;\n this.bufferState = bufferState;\n this.isPremounting = isPremounting;\n this.isPostmounting = isPostmounting;\n this.durationInFrames = durationInFrames;\n this.nonceManager = makeNonceManager();\n this.onVideoFrameCallback = onVideoFrameCallback;\n this.playing = playing;\n this.sequenceOffset = sequenceOffset;\n this.input = new Input({\n source: new UrlSource(this.src),\n formats: ALL_FORMATS\n });\n if (canvas) {\n const context = canvas.getContext(\"2d\", {\n alpha: true,\n desynchronized: true\n });\n if (!context) {\n throw new Error(\"Could not get 2D context from canvas\");\n }\n this.context = context;\n } else {\n this.context = null;\n }\n }\n input;\n isDisposalError() {\n return this.input.disposed === true;\n }\n initialize(startTimeUnresolved, initialMuted) {\n const promise = this._initialize(startTimeUnresolved, initialMuted);\n this.initializationPromise = promise;\n this.seekPromiseChain = promise;\n return promise;\n }\n getStartTime() {\n return (this.trimBefore ?? 0) / this.fps;\n }\n getEndTime() {\n const mediaEndTime = calculateEndTime({\n mediaDurationInSeconds: this.totalDuration,\n ifNoMediaDuration: \"fail\",\n src: this.src,\n trimAfter: this.trimAfter,\n trimBefore: this.trimBefore,\n fps: this.fps\n });\n if (this.loop) {\n return mediaEndTime;\n }\n const sequenceEndMediaTime = this.durationInFrames / this.fps * this.playbackRate + (this.trimBefore ?? 
0) / this.fps;\n return Math.min(mediaEndTime, sequenceEndMediaTime);\n }\n async _initialize(startTimeUnresolved, initialMuted) {\n let __stack = [];\n try {\n const _ = __using(__stack, this.delayPlaybackHandleIfNotPremounting(), 0);\n try {\n if (this.input.disposed) {\n return { type: \"disposed\" };\n }\n try {\n await this.input.getFormat();\n } catch (error) {\n if (this.isDisposalError()) {\n return { type: \"disposed\" };\n }\n const err = error;\n if (isNetworkError(err)) {\n throw error;\n }\n Internals6.Log.error({ logLevel: this.logLevel, tag: \"@remotion/media\" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);\n return { type: \"unknown-container-format\" };\n }\n const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([\n this.input.computeDuration(),\n this.input.getPrimaryVideoTrack(),\n this.input.getAudioTracks()\n ]);\n if (this.input.disposed) {\n return { type: \"disposed\" };\n }\n this.totalDuration = durationInSeconds;\n const audioTrack = audioTracks[this.audioStreamIndex] ?? 
null;\n if (!videoTrack && !audioTrack) {\n return { type: \"no-tracks\" };\n }\n if (videoTrack) {\n const canDecode = await videoTrack.canDecode();\n if (!canDecode) {\n return { type: \"cannot-decode\" };\n }\n if (this.input.disposed) {\n return { type: \"disposed\" };\n }\n this.videoIteratorManager = videoIteratorManager({\n videoTrack,\n delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,\n context: this.context,\n canvas: this.canvas,\n getOnVideoFrameCallback: () => this.onVideoFrameCallback,\n logLevel: this.logLevel,\n drawDebugOverlay: this.drawDebugOverlay,\n getEndTime: () => this.getEndTime(),\n getStartTime: () => this.getStartTime(),\n getIsLooping: () => this.loop\n });\n }\n const startTime = this.getTrimmedTime(startTimeUnresolved);\n if (startTime === null) {\n throw new Error(`should have asserted that the time is not null`);\n }\n if (audioTrack && this.sharedAudioContext) {\n const canDecode = await audioTrack.canDecode();\n if (!canDecode) {\n return { type: \"cannot-decode\" };\n }\n if (this.input.disposed) {\n return { type: \"disposed\" };\n }\n this.audioIteratorManager = audioIteratorManager({\n audioTrack,\n delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,\n sharedAudioContext: this.sharedAudioContext,\n getIsLooping: () => this.loop,\n getEndTime: () => this.getEndTime(),\n getStartTime: () => this.getStartTime(),\n initialMuted,\n drawDebugOverlay: this.drawDebugOverlay\n });\n }\n const nonce = this.nonceManager.createAsyncOperation();\n try {\n await Promise.all([\n this.audioIteratorManager ? this.audioIteratorManager.startAudioIterator({\n nonce,\n playbackRate: this.playbackRate * this.globalPlaybackRate,\n startFromSecond: startTime,\n getIsPlaying: () => this.playing,\n scheduleAudioNode: this.scheduleAudioNode,\n debugAudioScheduling: this.debugAudioScheduling\n }) : Promise.resolve(),\n this.videoIteratorManager ? 
this.videoIteratorManager.startVideoIterator(startTime, nonce) : Promise.resolve()\n ]);\n } catch (error) {\n if (this.isDisposalError()) {\n return { type: \"disposed\" };\n }\n Internals6.Log.error({ logLevel: this.logLevel, tag: \"@remotion/media\" }, \"[MediaPlayer] Failed to start audio and video iterators\", error);\n }\n return { type: \"success\", durationInSeconds };\n } catch (error) {\n const err = error;\n if (isNetworkError(err)) {\n Internals6.Log.error({ logLevel: this.logLevel, tag: \"@remotion/media\" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);\n return { type: \"network-error\" };\n }\n Internals6.Log.error({ logLevel: this.logLevel, tag: \"@remotion/media\" }, \"[MediaPlayer] Failed to initialize\", error);\n throw error;\n }\n } catch (_catch) {\n var _err = _catch, _hasErr = 1;\n } finally {\n __callDispose(__stack, _err, _hasErr);\n }\n }\n seekToWithQueue = async (newTime) => {\n const nonce = this.nonceManager.createAsyncOperation();\n await this.seekPromiseChain;\n this.seekPromiseChain = this.seekToDoNotCallDirectly(newTime, nonce);\n await this.seekPromiseChain;\n };\n async seekTo(time) {\n const newTime = this.getTrimmedTime(time);\n if (newTime === null) {\n throw new Error(`should have asserted that the time is not null`);\n }\n await this.seekToWithQueue(newTime);\n }\n async seekToDoNotCallDirectly(newTime, nonce) {\n if (nonce.isStale()) {\n return;\n }\n const shouldSeekAudio = this.audioIteratorManager && this.getAudioPlaybackTime(this.sharedAudioContext?.audioContext.currentTime ?? 0) !== newTime;\n try {\n await Promise.all([\n this.videoIteratorManager?.seek({\n newTime,\n nonce\n }),\n shouldSeekAudio ? 
this.audioIteratorManager?.seek({\n newTime,\n nonce,\n playbackRate: this.playbackRate * this.globalPlaybackRate,\n getIsPlaying: () => this.playing,\n scheduleAudioNode: this.scheduleAudioNode,\n debugAudioScheduling: this.debugAudioScheduling\n }) : null\n ]);\n } catch (error) {\n if (this.isDisposalError()) {\n return;\n }\n throw error;\n }\n }\n playAudio() {\n if (this.audioIteratorManager && this.sharedAudioContext?.audioContext.state === \"running\" && (this.sharedAudioContext?.audioContext?.getOutputTimestamp().contextTime ?? 0) > 0) {\n this.audioIteratorManager.resumeScheduledAudioChunks({\n playbackRate: this.playbackRate * this.globalPlaybackRate,\n scheduleAudioNode: this.scheduleAudioNode,\n debugAudioScheduling: this.debugAudioScheduling\n });\n }\n }\n play() {\n this.playAudio();\n if (this.playing) {\n return;\n }\n this.playing = true;\n this.drawDebugOverlay();\n }\n delayPlaybackHandleIfNotPremounting = () => {\n if (this.isPremounting || this.isPostmounting) {\n return {\n unblock: () => {},\n [Symbol.dispose]: () => {}\n };\n }\n const { unblock } = this.bufferState.delayPlayback();\n return {\n unblock,\n [Symbol.dispose]: () => {\n unblock();\n }\n };\n };\n pause() {\n if (!this.playing) {\n return;\n }\n this.playing = false;\n this.audioIteratorManager?.pausePlayback();\n this.drawDebugOverlay();\n }\n setMuted(muted) {\n this.audioIteratorManager?.setMuted(muted);\n }\n setVolume(volume) {\n if (!this.audioIteratorManager) {\n return;\n }\n this.audioIteratorManager.setVolume(volume);\n }\n getTrimmedTime(unloopedTimeInSeconds) {\n return getTimeInSeconds({\n unloopedTimeInSeconds,\n playbackRate: this.playbackRate,\n loop: this.loop,\n trimBefore: this.trimBefore,\n trimAfter: this.trimAfter,\n mediaDurationInSeconds: this.totalDuration ?? 
null,\n fps: this.fps,\n ifNoMediaDuration: \"infinity\",\n src: this.src\n });\n }\n async updateAfterTrimChange(unloopedTimeInSeconds) {\n if (!this.audioIteratorManager && !this.videoIteratorManager) {\n return;\n }\n const newMediaTime = this.getTrimmedTime(unloopedTimeInSeconds);\n this.audioIteratorManager?.destroyIterator();\n if (newMediaTime !== null) {\n if (!this.playing && this.videoIteratorManager) {\n await this.seekToWithQueue(newMediaTime);\n }\n }\n }\n async setTrimBefore(trimBefore, unloopedTimeInSeconds) {\n if (this.trimBefore !== trimBefore) {\n this.trimBefore = trimBefore;\n await this.updateAfterTrimChange(unloopedTimeInSeconds);\n }\n }\n async setTrimAfter(trimAfter, unloopedTimeInSeconds) {\n if (this.trimAfter !== trimAfter) {\n this.trimAfter = trimAfter;\n await this.updateAfterTrimChange(unloopedTimeInSeconds);\n }\n }\n setDebugOverlay(debugOverlay) {\n this.debugOverlay = debugOverlay;\n }\n setDebugAudioScheduling(debugAudioScheduling) {\n this.debugAudioScheduling = debugAudioScheduling;\n }\n rescheduleAudioChunks() {\n if (!this.audioIteratorManager) {\n return;\n }\n if (!this.sharedAudioContext) {\n return;\n }\n const iterator = this.audioIteratorManager.getAudioBufferIterator();\n if (!iterator) {\n return;\n }\n iterator.moveQueuedChunksToPauseQueue();\n if (this.playing && this.sharedAudioContext.audioContext.state === \"running\" && (this.sharedAudioContext.audioContext?.getOutputTimestamp().contextTime ?? 
0) > 0) {\n this.audioIteratorManager.resumeScheduledAudioChunks({\n playbackRate: this.playbackRate * this.globalPlaybackRate,\n scheduleAudioNode: this.scheduleAudioNode,\n debugAudioScheduling: this.debugAudioScheduling\n });\n }\n }\n async setPlaybackRate(rate, unloopedTimeInSeconds) {\n const previousRate = this.playbackRate;\n if (previousRate !== rate) {\n this.playbackRate = rate;\n this.rescheduleAudioChunks();\n await this.seekTo(unloopedTimeInSeconds);\n }\n }\n setGlobalPlaybackRate(rate) {\n const previousRate = this.globalPlaybackRate;\n if (previousRate !== rate) {\n this.globalPlaybackRate = rate;\n this.rescheduleAudioChunks();\n }\n }\n setFps(fps) {\n this.fps = fps;\n }\n setIsPremounting(isPremounting) {\n this.isPremounting = isPremounting;\n }\n setIsPostmounting(isPostmounting) {\n this.isPostmounting = isPostmounting;\n }\n setLoop(loop) {\n this.loop = loop;\n }\n setSequenceOffset(offset) {\n this.sequenceOffset = offset;\n }\n setDurationInFrames(durationInFrames) {\n this.durationInFrames = durationInFrames;\n }\n async dispose() {\n if (this.initializationPromise) {\n try {\n await this.initializationPromise;\n } catch {}\n }\n this.nonceManager.createAsyncOperation();\n this.videoIteratorManager?.destroy();\n this.audioIteratorManager?.destroyIterator();\n this.input.dispose();\n }\n scheduleAudioNode = (node, mediaTimestamp) => {\n if (!this.sharedAudioContext) {\n throw new Error(\"Shared audio context not found\");\n }\n const { audioContext } = this.sharedAudioContext;\n const { currentTime } = audioContext;\n const globalTime = (currentTime - this.sharedAudioContext.audioSyncAnchor.value) * this.globalPlaybackRate;\n const timeInSeconds = globalTime - this.sequenceOffset;\n const localTime = this.getTrimmedTime(timeInSeconds);\n if (localTime === null) {\n throw new Error(\"hmm, should not render!\");\n }\n const targetTime = (mediaTimestamp - localTime) / (this.playbackRate * this.globalPlaybackRate);\n return 
this.sharedAudioContext.scheduleAudioNode({\n node,\n mediaTimestamp,\n targetTime,\n currentTime,\n sequenceEndTime: this.getEndTime(),\n sequenceStartTime: this.getStartTime(),\n debugAudioScheduling: this.debugAudioScheduling\n });\n };\n getAudioPlaybackTime(currentTime) {\n if (!this.sharedAudioContext) {\n throw new Error(\"Shared audio context not found\");\n }\n const globalTime = (currentTime - this.sharedAudioContext.audioSyncAnchor.value) * this.globalPlaybackRate;\n const localTime = globalTime - this.sequenceOffset;\n const trimmedTime = this.getTrimmedTime(localTime);\n if (trimmedTime !== null) {\n return trimmedTime;\n }\n return localTime * this.playbackRate + (this.trimBefore ?? 0) / this.fps;\n }\n setVideoFrameCallback(callback) {\n this.onVideoFrameCallback = callback;\n }\n drawDebugOverlay = () => {\n if (!this.debugOverlay)\n return;\n if (this.context && this.canvas) {\n drawPreviewOverlay({\n context: this.context,\n audioTime: this.sharedAudioContext?.audioContext.currentTime ?? null,\n audioContextState: this.sharedAudioContext?.audioContext.state ?? null,\n audioSyncAnchor: this.sharedAudioContext?.audioSyncAnchor ?? 
null,\n audioIteratorManager: this.audioIteratorManager,\n playing: this.playing,\n videoIteratorManager: this.videoIteratorManager,\n playbackRate: this.playbackRate * this.globalPlaybackRate\n });\n }\n };\n}\n\n// src/on-error.ts\nvar callOnErrorAndResolve = ({\n onError,\n error,\n disallowFallback,\n isClientSideRendering,\n clientSideError\n}) => {\n const result = onError?.(error);\n if (isClientSideRendering) {\n return [\"fail\", clientSideError];\n }\n if (result) {\n return [result, error];\n }\n if (disallowFallback) {\n return [\"fail\", error];\n }\n return [\"fallback\", error];\n};\n\n// src/show-in-timeline.ts\nimport { useMemo } from \"react\";\nimport { Internals as Internals7, useVideoConfig } from \"remotion\";\nvar useLoopDisplay = ({\n loop,\n mediaDurationInSeconds,\n playbackRate,\n trimAfter,\n trimBefore\n}) => {\n const { durationInFrames: compDuration, fps } = useVideoConfig();\n const loopDisplay = useMemo(() => {\n if (!loop || !mediaDurationInSeconds) {\n return;\n }\n const durationInFrames = Internals7.calculateMediaDuration({\n mediaDurationInFrames: mediaDurationInSeconds * fps,\n playbackRate,\n trimAfter,\n trimBefore\n });\n const maxTimes = compDuration / durationInFrames;\n return {\n numberOfTimes: maxTimes,\n startOffset: 0,\n durationInFrames\n };\n }, [\n compDuration,\n fps,\n loop,\n mediaDurationInSeconds,\n playbackRate,\n trimAfter,\n trimBefore\n ]);\n return loopDisplay;\n};\n\n// src/use-common-effects.ts\nimport { useContext, useLayoutEffect } from \"react\";\nimport { Internals as Internals8 } from \"remotion\";\nvar useCommonEffects = ({\n mediaPlayerRef,\n mediaPlayerReady,\n currentTimeRef,\n playing,\n isPlayerBuffering,\n frame,\n trimBefore,\n trimAfter,\n effectiveMuted,\n userPreferredVolume,\n playbackRate,\n globalPlaybackRate,\n fps,\n sequenceOffset,\n loop,\n debugAudioScheduling,\n durationInFrames,\n isPremounting,\n isPostmounting,\n currentTime,\n logLevel,\n sharedAudioContext,\n label\n}) => 
{\n const absoluteTime = Internals8.useAbsoluteTimelinePosition();\n const { playing: playingWhilePremounting } = useContext(Internals8.PremountContext);\n useLayoutEffect(() => {\n if (sharedAudioContext?.audioContext && sharedAudioContext.audioSyncAnchor) {\n setGlobalTimeAnchor({\n audioContext: sharedAudioContext.audioContext,\n audioSyncAnchor: sharedAudioContext.audioSyncAnchor,\n absoluteTimeInSeconds: absoluteTime / fps,\n globalPlaybackRate,\n debugAudioScheduling,\n logLevel\n });\n }\n }, [\n absoluteTime,\n globalPlaybackRate,\n sharedAudioContext,\n fps,\n debugAudioScheduling,\n logLevel\n ]);\n if (playingWhilePremounting) {\n mediaPlayerRef.current?.playAudio();\n }\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer)\n return;\n if (playing && !isPlayerBuffering) {\n mediaPlayer.play();\n } else {\n mediaPlayer.pause();\n }\n }, [\n isPlayerBuffering,\n playing,\n logLevel,\n mediaPlayerReady,\n frame,\n mediaPlayerRef\n ]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);\n }, [trimBefore, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);\n }, [trimAfter, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady)\n return;\n mediaPlayer.setMuted(effectiveMuted);\n }, [effectiveMuted, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setVolume(userPreferredVolume);\n }, [userPreferredVolume, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() 
=> {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setPlaybackRate(playbackRate, currentTimeRef.current);\n }, [playbackRate, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);\n }, [globalPlaybackRate, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setLoop(loop);\n }, [loop, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setDurationInFrames(durationInFrames);\n }, [durationInFrames, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setIsPremounting(isPremounting);\n }, [isPremounting, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setIsPostmounting(isPostmounting);\n }, [isPostmounting, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setFps(fps);\n }, [fps, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setSequenceOffset(sequenceOffset);\n }, [sequenceOffset, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n 
mediaPlayer.setDebugAudioScheduling(debugAudioScheduling);\n }, [debugAudioScheduling, mediaPlayerReady, mediaPlayerRef]);\n useLayoutEffect(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady)\n return;\n mediaPlayer.seekTo(currentTime).catch(() => {});\n Internals8.Log.trace({ logLevel, tag: \"@remotion/media\" }, `[${label}] Updating target time to ${currentTime.toFixed(3)}s`);\n }, [currentTime, logLevel, mediaPlayerReady, label, mediaPlayerRef]);\n};\n\n// src/use-media-in-timeline.ts\nimport { useContext as useContext2, useState, useEffect } from \"react\";\nimport { Internals as Internals9, useCurrentFrame } from \"remotion\";\nvar useMediaInTimeline = ({\n volume,\n mediaVolume,\n src,\n mediaType,\n playbackRate,\n displayName,\n stack,\n showInTimeline,\n premountDisplay,\n postmountDisplay,\n loopDisplay,\n trimBefore,\n trimAfter,\n controls\n}) => {\n const parentSequence = useContext2(Internals9.SequenceContext);\n const startsAt = Internals9.useMediaStartsAt();\n const { registerSequence, unregisterSequence } = useContext2(Internals9.SequenceManager);\n const [sequenceId] = useState(() => String(Math.random()));\n const [mediaId] = useState(() => String(Math.random()));\n const frame = useCurrentFrame();\n const {\n volumes,\n duration,\n doesVolumeChange,\n nonce,\n rootId,\n isStudio,\n finalDisplayName\n } = Internals9.useBasicMediaInTimeline({\n volume,\n mediaVolume,\n mediaType,\n src,\n displayName,\n trimBefore,\n trimAfter,\n playbackRate\n });\n useEffect(() => {\n if (!src) {\n throw new Error(\"No src passed\");\n }\n if (!isStudio && window.process?.env?.NODE_ENV !== \"test\") {\n return;\n }\n if (!showInTimeline) {\n return;\n }\n const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;\n if (loopDisplay) {\n registerSequence({\n type: \"sequence\",\n premountDisplay,\n postmountDisplay,\n parent: parentSequence?.id ?? 
null,\n displayName: finalDisplayName,\n rootId,\n showInTimeline: true,\n nonce: nonce.get(),\n loopDisplay,\n stack,\n from: 0,\n duration,\n id: sequenceId,\n controls: null\n });\n }\n registerSequence({\n type: mediaType,\n src,\n id: mediaId,\n duration: loopDisplay?.durationInFrames ?? duration,\n from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,\n parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,\n displayName: finalDisplayName,\n rootId,\n volume: volumes,\n showInTimeline: true,\n nonce: nonce.get(),\n startMediaFrom: 0 - startsAt + (trimBefore ?? 0),\n doesVolumeChange,\n loopDisplay: undefined,\n playbackRate,\n stack,\n premountDisplay: null,\n postmountDisplay: null,\n controls: controls ?? null\n });\n return () => {\n if (loopDisplay) {\n unregisterSequence(sequenceId);\n }\n unregisterSequence(mediaId);\n };\n }, [\n controls,\n doesVolumeChange,\n duration,\n finalDisplayName,\n isStudio,\n loopDisplay,\n mediaId,\n mediaType,\n nonce,\n parentSequence?.id,\n playbackRate,\n postmountDisplay,\n premountDisplay,\n registerSequence,\n rootId,\n sequenceId,\n showInTimeline,\n src,\n stack,\n startsAt,\n unregisterSequence,\n volumes,\n frame,\n trimBefore\n ]);\n return {\n id: mediaId\n };\n};\n\n// src/audio/audio-for-preview.tsx\nimport { jsx } from \"react/jsx-runtime\";\nvar {\n useUnsafeVideoConfig,\n Timeline,\n SharedAudioContext,\n useMediaMutedState,\n useMediaVolumeState,\n useFrameForVolumeProp,\n evaluateVolume,\n warnAboutTooHighVolume,\n usePreload,\n SequenceContext\n} = Internals10;\nvar AudioForPreviewAssertedShowing = ({\n src,\n playbackRate,\n logLevel,\n muted,\n volume,\n loopVolumeCurveBehavior,\n loop,\n trimAfter,\n trimBefore,\n name,\n showInTimeline,\n stack,\n disallowFallbackToHtml5Audio,\n toneFrequency,\n audioStreamIndex,\n fallbackHtml5AudioProps,\n debugAudioScheduling,\n onError,\n controls\n}) => {\n const videoConfig = useUnsafeVideoConfig();\n const frame = 
useCurrentFrame2();\n const mediaPlayerRef = useRef(null);\n const initialTrimBeforeRef = useRef(trimBefore);\n const initialTrimAfterRef = useRef(trimAfter);\n const [mediaPlayerReady, setMediaPlayerReady] = useState2(false);\n const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState2(false);\n const [playing] = Timeline.usePlayingState();\n const timelineContext = Internals10.useTimelineContext();\n const globalPlaybackRate = timelineContext.playbackRate;\n const sharedAudioContext = useContext3(SharedAudioContext);\n const buffer = useBufferState();\n const [mediaMuted] = useMediaMutedState();\n const [mediaVolume] = useMediaVolumeState();\n const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState2(null);\n const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? \"repeat\");\n const userPreferredVolume = evaluateVolume({\n frame: volumePropFrame,\n volume,\n mediaVolume\n });\n warnAboutTooHighVolume(userPreferredVolume);\n if (!videoConfig) {\n throw new Error(\"No video config found\");\n }\n if (!src) {\n throw new TypeError(\"No `src` was passed to <NewAudioForPreview>.\");\n }\n const currentTime = frame / videoConfig.fps;\n const currentTimeRef = useRef(currentTime);\n currentTimeRef.current = currentTime;\n const preloadedSrc = usePreload(src);\n const parentSequence = useContext3(SequenceContext);\n const isPremounting = Boolean(parentSequence?.premounting);\n const isPostmounting = Boolean(parentSequence?.postmounting);\n const sequenceOffset = ((parentSequence?.cumulatedFrom ?? 0) + (parentSequence?.relativeFrom ?? 0)) / videoConfig.fps;\n const loopDisplay = useLoopDisplay({\n loop,\n mediaDurationInSeconds,\n playbackRate,\n trimAfter,\n trimBefore\n });\n useMediaInTimeline({\n volume,\n mediaVolume,\n mediaType: \"audio\",\n src,\n playbackRate,\n displayName: name ?? null,\n stack,\n showInTimeline,\n premountDisplay: parentSequence?.premountDisplay ?? 
null,\n postmountDisplay: parentSequence?.postmountDisplay ?? null,\n loopDisplay,\n trimAfter,\n trimBefore,\n controls\n });\n const bufferingContext = useContext3(Internals10.BufferingContextReact);\n if (!bufferingContext) {\n throw new Error(\"useMediaPlayback must be used inside a <BufferingContext>\");\n }\n const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;\n const isPlayerBuffering = Internals10.useIsPlayerBuffering(bufferingContext);\n const initialPlaying = useRef(playing && !isPlayerBuffering);\n const initialIsPremounting = useRef(isPremounting);\n const initialIsPostmounting = useRef(isPostmounting);\n const initialGlobalPlaybackRate = useRef(globalPlaybackRate);\n const initialPlaybackRate = useRef(playbackRate);\n const initialMuted = useRef(effectiveMuted);\n const initialSequenceOffset = useRef(sequenceOffset);\n useCommonEffects({\n mediaPlayerRef,\n mediaPlayerReady,\n currentTimeRef,\n playing,\n isPlayerBuffering,\n frame,\n trimBefore,\n trimAfter,\n effectiveMuted,\n userPreferredVolume,\n playbackRate,\n globalPlaybackRate,\n fps: videoConfig.fps,\n sequenceOffset,\n loop,\n debugAudioScheduling,\n durationInFrames: videoConfig.durationInFrames,\n isPremounting,\n isPostmounting,\n currentTime,\n logLevel,\n sharedAudioContext,\n label: \"AudioForPreview\"\n });\n useEffect2(() => {\n if (!sharedAudioContext)\n return;\n if (!sharedAudioContext.audioContext)\n return;\n const { audioContext, audioSyncAnchor, scheduleAudioNode } = sharedAudioContext;\n try {\n const player = new MediaPlayer({\n src: preloadedSrc,\n logLevel,\n sharedAudioContext: { audioContext, audioSyncAnchor, scheduleAudioNode },\n loop,\n trimAfter: initialTrimAfterRef.current,\n trimBefore: initialTrimBeforeRef.current,\n fps: videoConfig.fps,\n canvas: null,\n playbackRate: initialPlaybackRate.current,\n audioStreamIndex: audioStreamIndex ?? 
0,\n debugOverlay: false,\n debugAudioScheduling,\n bufferState: buffer,\n isPostmounting: initialIsPostmounting.current,\n isPremounting: initialIsPremounting.current,\n globalPlaybackRate: initialGlobalPlaybackRate.current,\n durationInFrames: videoConfig.durationInFrames,\n onVideoFrameCallback: null,\n playing: initialPlaying.current,\n sequenceOffset: initialSequenceOffset.current\n });\n mediaPlayerRef.current = player;\n player.initialize(currentTimeRef.current, initialMuted.current).then((result) => {\n if (result.type === \"disposed\") {\n return;\n }\n const handleError = (error, fallbackMessage) => {\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error,\n disallowFallback: disallowFallbackToHtml5Audio,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n } else {\n Internals10.Log.warn({ logLevel, tag: \"@remotion/media\" }, fallbackMessage);\n setShouldFallbackToNativeAudio(true);\n }\n };\n if (result.type === \"unknown-container-format\") {\n handleError(new Error(`Unknown container format ${preloadedSrc}.`), `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"network-error\") {\n handleError(new Error(`Network error fetching ${preloadedSrc}.`), `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"cannot-decode\") {\n handleError(new Error(`Cannot decode ${preloadedSrc}.`), `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"no-tracks\") {\n handleError(new Error(`No video or audio tracks found for ${preloadedSrc}.`), `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"success\") {\n setMediaPlayerReady(true);\n 
setMediaDurationInSeconds(result.durationInSeconds);\n Internals10.Log.trace({ logLevel, tag: \"@remotion/media\" }, `[AudioForPreview] MediaPlayer initialized successfully`);\n }\n }).catch((error) => {\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error,\n disallowFallback: disallowFallbackToHtml5Audio,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n } else {\n Internals10.Log.error({ logLevel, tag: \"@remotion/media\" }, \"[AudioForPreview] Failed to initialize MediaPlayer\", error);\n setShouldFallbackToNativeAudio(true);\n }\n });\n } catch (error) {\n const [action, errorToUse] = callOnErrorAndResolve({\n error,\n onError,\n disallowFallback: disallowFallbackToHtml5Audio,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n }\n Internals10.Log.error({ logLevel, tag: \"@remotion/media\" }, \"[AudioForPreview] MediaPlayer initialization failed\", errorToUse);\n setShouldFallbackToNativeAudio(true);\n }\n return () => {\n if (mediaPlayerRef.current) {\n Internals10.Log.trace({ logLevel, tag: \"@remotion/media\" }, `[AudioForPreview] Disposing MediaPlayer`);\n mediaPlayerRef.current.dispose();\n mediaPlayerRef.current = null;\n }\n setMediaPlayerReady(false);\n setShouldFallbackToNativeAudio(false);\n };\n }, [\n preloadedSrc,\n logLevel,\n sharedAudioContext,\n loop,\n videoConfig.fps,\n audioStreamIndex,\n disallowFallbackToHtml5Audio,\n debugAudioScheduling,\n buffer,\n onError,\n videoConfig.durationInFrames\n ]);\n if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {\n return /* @__PURE__ */ jsx(RemotionAudio, {\n src,\n muted,\n volume,\n startFrom: trimBefore,\n endAt: trimAfter,\n playbackRate,\n loopVolumeCurveBehavior,\n name,\n loop,\n showInTimeline,\n stack: stack ?? 
undefined,\n toneFrequency,\n audioStreamIndex,\n pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering,\n crossOrigin: fallbackHtml5AudioProps?.crossOrigin,\n ...fallbackHtml5AudioProps\n });\n }\n return null;\n};\nvar AudioForPreview = ({\n loop = false,\n src,\n logLevel,\n muted,\n name,\n volume,\n loopVolumeCurveBehavior,\n playbackRate = 1,\n trimAfter,\n trimBefore,\n showInTimeline,\n stack,\n disallowFallbackToHtml5Audio,\n toneFrequency,\n audioStreamIndex,\n fallbackHtml5AudioProps,\n debugAudioScheduling,\n onError,\n controls\n}) => {\n const preloadedSrc = usePreload(src);\n const defaultLogLevel = Internals10.useLogLevel();\n const frame = useCurrentFrame2();\n const videoConfig = useVideoConfig2();\n const currentTime = frame / videoConfig.fps;\n const showShow = useMemo2(() => {\n return getTimeInSeconds({\n unloopedTimeInSeconds: currentTime,\n playbackRate,\n loop,\n trimBefore,\n trimAfter,\n mediaDurationInSeconds: Infinity,\n fps: videoConfig.fps,\n ifNoMediaDuration: \"infinity\",\n src\n }) !== null;\n }, [\n currentTime,\n playbackRate,\n src,\n trimAfter,\n trimBefore,\n videoConfig.fps,\n loop\n ]);\n if (!showShow) {\n return null;\n }\n return /* @__PURE__ */ jsx(AudioForPreviewAssertedShowing, {\n audioStreamIndex: audioStreamIndex ?? 0,\n src: preloadedSrc,\n playbackRate,\n logLevel: logLevel ?? defaultLogLevel,\n muted: muted ?? false,\n volume: volume ?? 1,\n loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? \"repeat\",\n loop,\n trimAfter,\n trimBefore,\n name,\n showInTimeline: showInTimeline ?? true,\n stack,\n disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false,\n toneFrequency,\n debugAudioScheduling: debugAudioScheduling ?? 
false,\n onError,\n fallbackHtml5AudioProps,\n controls\n });\n};\n\n// src/audio/audio-for-rendering.tsx\nimport { useContext as useContext4, useLayoutEffect as useLayoutEffect2, useMemo as useMemo3, useState as useState3 } from \"react\";\nimport {\n cancelRender as cancelRender2,\n Html5Audio,\n Internals as Internals18,\n random,\n useCurrentFrame as useCurrentFrame3,\n useDelayRender,\n useRemotionEnvironment\n} from \"remotion\";\n\n// src/caches.ts\nimport React2 from \"react\";\nimport { cancelRender, Internals as Internals15 } from \"remotion\";\n\n// src/audio-extraction/audio-manager.ts\nimport { Internals as Internals12 } from \"remotion\";\n\n// src/audio-extraction/audio-iterator.ts\nimport { Internals as Internals11 } from \"remotion\";\n\n// src/audio-extraction/audio-cache.ts\nvar makeAudioCache = () => {\n const timestamps = [];\n const samples = {};\n const addFrame = (sample) => {\n timestamps.push(sample.timestamp);\n samples[sample.timestamp] = sample;\n };\n const clearBeforeThreshold = (threshold) => {\n for (const timestamp of timestamps.slice()) {\n const endTimestamp = timestamp + samples[timestamp].duration;\n if (endTimestamp < threshold) {\n const isLast = timestamp === timestamps[timestamps.length - 1];\n if (isLast) {\n continue;\n }\n samples[timestamp].close();\n delete samples[timestamp];\n timestamps.splice(timestamps.indexOf(timestamp), 1);\n }\n }\n };\n const deleteAll = () => {\n for (const timestamp of timestamps) {\n samples[timestamp].close();\n delete samples[timestamp];\n }\n timestamps.length = 0;\n };\n const getSamples = (timestamp, durationInSeconds) => {\n const selected = [];\n for (let i = 0;i < timestamps.length; i++) {\n const sampleTimestamp = timestamps[i];\n const sample = samples[sampleTimestamp];\n if (sample.timestamp + sample.duration - 0.0000000001 <= timestamp) {\n continue;\n }\n if (sample.timestamp >= timestamp + durationInSeconds - 0.0000000001) {\n break;\n }\n selected.push(sample);\n }\n return 
selected;\n };\n const getOpenTimestamps = () => {\n return timestamps;\n };\n const getOldestTimestamp = () => {\n return timestamps[0];\n };\n const getNewestTimestamp = () => {\n if (timestamps.length === 0) {\n return null;\n }\n const sample = samples[timestamps[timestamps.length - 1]];\n return sample.timestamp + sample.duration;\n };\n return {\n addFrame,\n clearBeforeThreshold,\n deleteAll,\n getSamples,\n getOldestTimestamp,\n getNewestTimestamp,\n getOpenTimestamps\n };\n};\n\n// src/audio-extraction/audio-iterator.ts\nvar EXTRA_THRESHOLD_IN_SECONDS = 1.5;\nvar safetyOutOfOrderThreshold = 0.2;\nvar warned = {};\nvar warnAboutMatroskaOnce = (src, logLevel) => {\n if (warned[src]) {\n return;\n }\n warned[src] = true;\n Internals11.Log.warn({ logLevel, tag: \"@remotion/media\" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);\n};\nvar makeAudioIterator2 = ({\n audioSampleSink,\n isMatroska,\n startTimestamp,\n src,\n actualMatroskaTimestamps,\n logLevel\n}) => {\n const sampleIterator = audioSampleSink.samples(isMatroska ? 
0 : Math.max(0, startTimestamp - EXTRA_THRESHOLD_IN_SECONDS));\n if (isMatroska) {\n warnAboutMatroskaOnce(src, logLevel);\n }\n let fullDuration = null;\n const cache = makeAudioCache();\n let lastUsed = Date.now();\n const getNextSample = async () => {\n lastUsed = Date.now();\n const { value: sample, done } = await sampleIterator.next();\n if (done) {\n fullDuration = cache.getNewestTimestamp();\n return null;\n }\n const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);\n if (realTimestamp !== null && realTimestamp !== sample.timestamp) {\n sample.setTimestamp(realTimestamp);\n }\n actualMatroskaTimestamps.observeTimestamp(sample.timestamp);\n actualMatroskaTimestamps.observeTimestamp(sample.timestamp + sample.duration);\n cache.addFrame(sample);\n return sample;\n };\n const getSamples = async (timestamp, durationInSeconds) => {\n lastUsed = Date.now();\n if (fullDuration !== null && timestamp > fullDuration) {\n cache.clearBeforeThreshold(fullDuration - safetyOutOfOrderThreshold);\n return [];\n }\n const samples = cache.getSamples(timestamp, durationInSeconds);\n const newestTimestamp = cache.getNewestTimestamp();\n if (newestTimestamp !== null) {\n if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {\n return samples;\n }\n }\n while (true) {\n const sample = await getNextSample();\n const deleteBefore = fullDuration === null ? 
timestamp : Math.min(timestamp, fullDuration);\n cache.clearBeforeThreshold(deleteBefore - safetyOutOfOrderThreshold);\n if (sample === null) {\n break;\n }\n if (sample.timestamp + sample.duration - 0.0000000001 <= timestamp) {\n continue;\n }\n if (sample.timestamp >= timestamp + durationInSeconds - 0.0000000001) {\n break;\n }\n samples.push(sample);\n }\n return samples;\n };\n const logOpenFrames = () => {\n const openTimestamps = cache.getOpenTimestamps();\n if (openTimestamps.length > 0) {\n const first = openTimestamps[0];\n const last = openTimestamps[openTimestamps.length - 1];\n Internals11.Log.verbose({ logLevel, tag: \"@remotion/media\" }, \"Open audio samples for src\", src, `${first.toFixed(3)}...${last.toFixed(3)}`);\n }\n };\n const getCacheStats = () => {\n return {\n count: cache.getOpenTimestamps().length,\n size: cache.getOpenTimestamps().reduce((acc, t) => acc + t, 0)\n };\n };\n const canSatisfyRequestedTime = (timestamp) => {\n const oldestTimestamp = cache.getOldestTimestamp() ?? 
startTimestamp;\n if (fullDuration !== null && timestamp > fullDuration) {\n return true;\n }\n return oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10;\n };\n const prepareForDeletion = () => {\n cache.deleteAll();\n sampleIterator.return().then((value) => {\n if (value.value) {\n value.value.close();\n }\n });\n fullDuration = null;\n };\n let op = Promise.resolve([]);\n return {\n src,\n getSamples: (ts, dur) => {\n op = op.then(() => getSamples(ts, dur));\n return op;\n },\n waitForCompletion: async () => {\n await op;\n return true;\n },\n canSatisfyRequestedTime,\n logOpenFrames,\n getCacheStats,\n getLastUsed: () => lastUsed,\n prepareForDeletion,\n startTimestamp,\n clearBeforeThreshold: cache.clearBeforeThreshold,\n getOldestTimestamp: cache.getOldestTimestamp,\n getNewestTimestamp: cache.getNewestTimestamp\n };\n};\n\n// src/audio-extraction/audio-manager.ts\nvar makeAudioManager = () => {\n const iterators = [];\n const makeIterator = ({\n timeInSeconds,\n src,\n audioSampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel\n }) => {\n const iterator = makeAudioIterator2({\n audioSampleSink,\n isMatroska,\n startTimestamp: timeInSeconds,\n src,\n actualMatroskaTimestamps,\n logLevel\n });\n iterators.push(iterator);\n return iterator;\n };\n const getIteratorMostInThePast = () => {\n let mostInThePast = null;\n let mostInThePastIterator = null;\n for (const iterator of iterators) {\n const lastUsed = iterator.getLastUsed();\n if (mostInThePast === null || lastUsed < mostInThePast) {\n mostInThePast = lastUsed;\n mostInThePastIterator = iterator;\n }\n }\n return mostInThePastIterator;\n };\n const deleteOldestIterator = () => {\n const iterator = getIteratorMostInThePast();\n if (iterator) {\n iterator.prepareForDeletion();\n iterators.splice(iterators.indexOf(iterator), 1);\n }\n };\n const deleteDuplicateIterators = (logLevel) => {\n const seenKeys = new Set;\n for (let i = 0;i < iterators.length; i++) {\n const 
iterator = iterators[i];\n const key = `${iterator.src}-${iterator.getOldestTimestamp()}-${iterator.getNewestTimestamp()}`;\n if (seenKeys.has(key)) {\n iterator.prepareForDeletion();\n iterators.splice(iterators.indexOf(iterator), 1);\n Internals12.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Deleted duplicate iterator for ${iterator.src}`);\n }\n seenKeys.add(key);\n }\n };\n const getIterator = async ({\n src,\n timeInSeconds,\n audioSampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel,\n maxCacheSize\n }) => {\n let attempts = 0;\n const maxAttempts = 3;\n while ((await getTotalCacheStats()).totalSize > maxCacheSize && attempts < maxAttempts) {\n deleteOldestIterator();\n attempts++;\n }\n if ((await getTotalCacheStats()).totalSize > maxCacheSize && attempts >= maxAttempts) {\n Internals12.Log.warn({ logLevel, tag: \"@remotion/media\" }, `Audio cache: Exceeded max cache size after ${maxAttempts} attempts. Still ${(await getTotalCacheStats()).totalSize} bytes used, target was ${maxCacheSize} bytes.`);\n }\n for (const iterator of iterators) {\n if (iterator.src === src && await iterator.waitForCompletion() && iterator.canSatisfyRequestedTime(timeInSeconds)) {\n return iterator;\n }\n }\n for (let i = 0;i < iterators.length; i++) {\n const iterator = iterators[i];\n if (iterator.src === src && iterator.startTimestamp === timeInSeconds) {\n iterator.prepareForDeletion();\n iterators.splice(iterators.indexOf(iterator), 1);\n }\n }\n deleteDuplicateIterators(logLevel);\n return makeIterator({\n src,\n timeInSeconds,\n audioSampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel\n });\n };\n const getCacheStats = () => {\n let totalCount = 0;\n let totalSize = 0;\n for (const iterator of iterators) {\n const { count, size } = iterator.getCacheStats();\n totalCount += count;\n totalSize += size;\n }\n return { count: totalCount, totalSize };\n };\n const logOpenFrames = () => {\n for (const iterator of iterators) {\n 
iterator.logOpenFrames();\n }\n };\n let queue = Promise.resolve(undefined);\n return {\n getIterator: ({\n src,\n timeInSeconds,\n audioSampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel,\n maxCacheSize\n }) => {\n queue = queue.then(() => getIterator({\n src,\n timeInSeconds,\n audioSampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel,\n maxCacheSize\n }));\n return queue;\n },\n getCacheStats,\n getIteratorMostInThePast,\n logOpenFrames,\n deleteDuplicateIterators\n };\n};\n\n// src/video-extraction/keyframe-manager.ts\nimport { Internals as Internals14 } from \"remotion\";\n\n// src/render-timestamp-range.ts\nvar renderTimestampRange = (timestamps) => {\n if (timestamps.length === 0) {\n return \"(none)\";\n }\n if (timestamps.length === 1) {\n return timestamps[0].toFixed(3);\n }\n return `${timestamps[0].toFixed(3)}...${timestamps[timestamps.length - 1].toFixed(3)}`;\n};\n\n// src/video-extraction/keyframe-bank.ts\nimport { Internals as Internals13 } from \"remotion\";\n\n// src/video-extraction/get-allocation-size.ts\nvar getAllocationSize = (sample) => {\n if (sample.format === null) {\n return sample.codedHeight * sample.codedWidth * 4;\n }\n return sample.allocationSize();\n};\n\n// src/video-extraction/keyframe-bank.ts\nvar BIGGEST_ALLOWED_JUMP_FORWARD_SECONDS = 3;\nvar makeKeyframeBank = async ({\n logLevel: parentLogLevel,\n src,\n videoSampleSink,\n initialTimestampRequest\n}) => {\n const sampleIterator = videoSampleSink.samples(roundTo4Digits(initialTimestampRequest));\n const frames = {};\n const frameTimestamps = [];\n let hasReachedEndOfVideo = false;\n let lastUsed = Date.now();\n let allocationSize = 0;\n const getDurationOfFrame = (timestamp) => {\n const index = frameTimestamps.indexOf(timestamp);\n if (index === -1) {\n throw new Error(`Frame ${timestamp} not found`);\n }\n const nextTimestamp = frameTimestamps[index + 1];\n if (!nextTimestamp) {\n return null;\n }\n return nextTimestamp - timestamp;\n };\n const 
deleteFrameAtTimestamp = (timestamp) => {\n allocationSize -= getAllocationSize(frames[timestamp]);\n frameTimestamps.splice(frameTimestamps.indexOf(timestamp), 1);\n frames[timestamp].close();\n delete frames[timestamp];\n };\n const deleteFramesBeforeTimestamp = ({\n logLevel,\n timestampInSeconds\n }) => {\n const deletedTimestamps = [];\n for (const frameTimestamp of frameTimestamps.slice()) {\n if (hasReachedEndOfVideo) {\n const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];\n if (isLast) {\n continue;\n }\n }\n if (!frames[frameTimestamp]) {\n continue;\n }\n const duration = getDurationOfFrame(frameTimestamp) ?? frames[frameTimestamp].duration;\n if (frameTimestamp + duration < timestampInSeconds) {\n deleteFrameAtTimestamp(frameTimestamp);\n deletedTimestamps.push(frameTimestamp);\n }\n }\n if (deletedTimestamps.length > 0) {\n Internals13.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? \"\" : \"s\"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);\n }\n };\n const hasDecodedEnoughForTimestamp = (timestamp) => {\n const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];\n if (!lastFrameTimestamp) {\n return false;\n }\n const lastFrame = frames[lastFrameTimestamp];\n if (!lastFrame) {\n return true;\n }\n const duration = getDurationOfFrame(lastFrameTimestamp) ?? 
lastFrame.duration;\n return roundTo4Digits(lastFrameTimestamp + duration) > roundTo4Digits(timestamp);\n };\n const addFrame = (frame, logLevel) => {\n if (frames[frame.timestamp]) {\n deleteFrameAtTimestamp(frame.timestamp);\n }\n frames[frame.timestamp] = frame;\n frameTimestamps.push(frame.timestamp);\n allocationSize += getAllocationSize(frame);\n lastUsed = Date.now();\n Internals13.Log.trace({ logLevel, tag: \"@remotion/media\" }, `Added frame at ${frame.timestamp}sec to bank`);\n };\n const ensureEnoughFramesForTimestamp = async (timestampInSeconds, logLevel, fps) => {\n while (!hasDecodedEnoughForTimestamp(timestampInSeconds)) {\n const sample = await sampleIterator.next();\n if (sample.value) {\n addFrame(sample.value, logLevel);\n }\n if (sample.done) {\n hasReachedEndOfVideo = true;\n break;\n }\n deleteFramesBeforeTimestamp({\n logLevel: parentLogLevel,\n timestampInSeconds: timestampInSeconds - getSafeWindowOfMonotonicity(fps)\n });\n }\n lastUsed = Date.now();\n };\n const getFrameFromTimestamp = async (timestampInSeconds, fps) => {\n lastUsed = Date.now();\n let adjustedTimestamp = timestampInSeconds;\n if (hasReachedEndOfVideo && roundTo4Digits(adjustedTimestamp) > roundTo4Digits(frameTimestamps[frameTimestamps.length - 1])) {\n adjustedTimestamp = frameTimestamps[frameTimestamps.length - 1];\n }\n await ensureEnoughFramesForTimestamp(adjustedTimestamp, parentLogLevel, fps);\n for (let i = frameTimestamps.length - 1;i >= 0; i--) {\n const sample = frames[frameTimestamps[i]];\n if (!sample) {\n return null;\n }\n if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(adjustedTimestamp) || Math.abs(sample.timestamp - adjustedTimestamp) <= 0.001) {\n return sample;\n }\n }\n return frames[frameTimestamps[0]] ?? 
null;\n };\n const hasTimestampInSecond = async (timestamp, fps) => {\n return await getFrameFromTimestamp(timestamp, fps) !== null;\n };\n const getOpenFrameCount = () => {\n return {\n size: allocationSize,\n timestamps: frameTimestamps\n };\n };\n const getLastUsed = () => {\n return lastUsed;\n };\n let queue = Promise.resolve(undefined);\n const firstFrame = await sampleIterator.next();\n if (!firstFrame.value) {\n throw new Error(\"No first frame found\");\n }\n const startTimestampInSeconds = firstFrame.value.timestamp;\n Internals13.Log.verbose({ logLevel: parentLogLevel, tag: \"@remotion/media\" }, `Creating keyframe bank from ${startTimestampInSeconds}sec`);\n addFrame(firstFrame.value, parentLogLevel);\n const getRangeOfTimestamps = () => {\n if (frameTimestamps.length === 0) {\n return null;\n }\n const firstTimestamp = frameTimestamps[0];\n const lastTimestamp = frameTimestamps[frameTimestamps.length - 1];\n const lastFrame = frames[lastTimestamp];\n const lastFrameDuration = getDurationOfFrame(lastTimestamp) ?? lastFrame.duration ?? 
0;\n return {\n firstTimestamp,\n lastTimestamp: lastTimestamp + lastFrameDuration\n };\n };\n const prepareForDeletion = (logLevel, reason) => {\n const range = getRangeOfTimestamps();\n if (range) {\n Internals13.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Preparing for deletion (${reason}) of keyframe bank from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec`);\n }\n let framesDeleted = 0;\n for (const frameTimestamp of frameTimestamps.slice()) {\n if (!frames[frameTimestamp]) {\n continue;\n }\n deleteFrameAtTimestamp(frameTimestamp);\n framesDeleted++;\n }\n sampleIterator.return();\n frameTimestamps.length = 0;\n return { framesDeleted };\n };\n const canSatisfyTimestamp = (timestamp) => {\n if (frameTimestamps.length === 0) {\n return false;\n }\n const roundedTimestamp = roundTo4Digits(timestamp);\n const firstFrameTimestamp = roundTo4Digits(frameTimestamps[0]);\n const lastFrameTimestamp = roundTo4Digits(frameTimestamps[frameTimestamps.length - 1]);\n if (hasReachedEndOfVideo && roundedTimestamp > lastFrameTimestamp) {\n return true;\n }\n if (roundedTimestamp < firstFrameTimestamp) {\n const firstFrameIsInitialFrame = firstFrameTimestamp === startTimestampInSeconds;\n const firstFrameDoesSatisfy = firstFrameIsInitialFrame && roundedTimestamp >= initialTimestampRequest;\n return firstFrameDoesSatisfy;\n }\n if (roundedTimestamp - BIGGEST_ALLOWED_JUMP_FORWARD_SECONDS > lastFrameTimestamp) {\n return false;\n }\n return true;\n };\n const keyframeBank = {\n getFrameFromTimestamp: (timestamp, fps) => {\n queue = queue.then(() => getFrameFromTimestamp(timestamp, fps));\n return queue;\n },\n prepareForDeletion,\n hasTimestampInSecond: (timestamp, fps) => {\n queue = queue.then(() => hasTimestampInSecond(timestamp, fps));\n return queue;\n },\n addFrame,\n deleteFramesBeforeTimestamp,\n src,\n getOpenFrameCount,\n getLastUsed,\n canSatisfyTimestamp,\n getRangeOfTimestamps\n };\n return keyframeBank;\n};\n\n// 
src/video-extraction/keyframe-manager.ts\nvar makeKeyframeManager = () => {\n let sources = {};\n const addKeyframeBank = ({ src, bank }) => {\n sources[src] = sources[src] ?? [];\n sources[src].push(bank);\n };\n const logCacheStats = (logLevel) => {\n let count = 0;\n let totalSize = 0;\n for (const src in sources) {\n for (const bank of sources[src]) {\n const { size, timestamps } = bank.getOpenFrameCount();\n count += timestamps.length;\n totalSize += size;\n if (size === 0) {\n continue;\n }\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);\n }\n }\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);\n };\n const getCacheStats = () => {\n let count = 0;\n let totalSize = 0;\n for (const src in sources) {\n for (const bank of sources[src]) {\n const { timestamps, size } = bank.getOpenFrameCount();\n count += timestamps.length;\n totalSize += size;\n if (size === 0) {\n continue;\n }\n }\n }\n return { count, totalSize };\n };\n const getTheKeyframeBankMostInThePast = () => {\n let mostInThePast = null;\n let mostInThePastBank = null;\n let numberOfBanks = 0;\n for (const src in sources) {\n for (const bank of sources[src]) {\n const index = sources[src].indexOf(bank);\n const lastUsed = bank.getLastUsed();\n if (mostInThePast === null || lastUsed < mostInThePast) {\n mostInThePast = lastUsed;\n mostInThePastBank = { src, bank, index };\n }\n numberOfBanks++;\n }\n }\n if (!mostInThePastBank) {\n throw new Error(\"No keyframe bank found\");\n }\n return { mostInThePastBank, numberOfBanks };\n };\n const deleteOldestKeyframeBank = (logLevel) => {\n const {\n mostInThePastBank: {\n bank: mostInThePastBank,\n src: mostInThePastSrc,\n index: mostInThePastIndex\n },\n numberOfBanks\n } = getTheKeyframeBankMostInThePast();\n if (numberOfBanks < 2) {\n return { finish: true };\n }\n if (mostInThePastBank) {\n 
const range = mostInThePastBank.getRangeOfTimestamps();\n const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel, \"deleted oldest keyframe bank to stay under max cache size\");\n sources[mostInThePastSrc].splice(mostInThePastIndex, 1);\n if (range) {\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec to free up memory.`);\n }\n }\n return { finish: false };\n };\n const ensureToStayUnderMaxCacheSize = (logLevel, maxCacheSize) => {\n let cacheStats = getTotalCacheStats();\n let attempts = 0;\n const maxAttempts = 3;\n while (cacheStats.totalSize > maxCacheSize && attempts < maxAttempts) {\n const { finish } = deleteOldestKeyframeBank(logLevel);\n if (finish) {\n break;\n }\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, \"Deleted oldest keyframe bank to stay under max cache size\", (cacheStats.totalSize / 1024 / 1024).toFixed(1), \"out of\", (maxCacheSize / 1024 / 1024).toFixed(1));\n cacheStats = getTotalCacheStats();\n attempts++;\n }\n if (cacheStats.totalSize > maxCacheSize && attempts >= maxAttempts) {\n Internals14.Log.warn({ logLevel, tag: \"@remotion/media\" }, `Exceeded max cache size after ${maxAttempts} attempts. 
Remaining cache size: ${(cacheStats.totalSize / 1024 / 1024).toFixed(1)} MB, target was ${(maxCacheSize / 1024 / 1024).toFixed(1)} MB.`);\n }\n };\n const clearKeyframeBanksBeforeTime = ({\n timestampInSeconds,\n src,\n logLevel,\n fps\n }) => {\n const threshold = timestampInSeconds - getSafeWindowOfMonotonicity(fps);\n if (!sources[src]) {\n return;\n }\n const banks = sources[src];\n for (const bank of banks) {\n const range = bank.getRangeOfTimestamps();\n if (!range) {\n continue;\n }\n if (range.lastTimestamp < threshold) {\n bank.prepareForDeletion(logLevel, \"cleared before threshold \" + threshold);\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `[Video] Cleared frames for src ${src} from ${range.firstTimestamp}sec to ${range.lastTimestamp}sec`);\n const bankIndex = banks.indexOf(bank);\n delete sources[src][bankIndex];\n } else {\n bank.deleteFramesBeforeTimestamp({\n timestampInSeconds: threshold,\n logLevel\n });\n }\n }\n sources[src] = sources[src].filter((bank) => bank !== undefined);\n logCacheStats(logLevel);\n };\n const getKeyframeBankOrRefetch = async ({\n timestamp,\n videoSampleSink,\n src,\n logLevel\n }) => {\n const existingBanks = sources[src] ?? 
[];\n const existingBank = existingBanks?.find((bank) => bank.canSatisfyTimestamp(timestamp));\n if (!existingBank) {\n Internals14.Log.trace({ logLevel, tag: \"@remotion/media\" }, `Creating new keyframe bank for src ${src} at timestamp ${timestamp}`);\n const newKeyframeBank = await makeKeyframeBank({\n videoSampleSink,\n logLevel,\n src,\n initialTimestampRequest: timestamp\n });\n addKeyframeBank({ src, bank: newKeyframeBank });\n return newKeyframeBank;\n }\n if (existingBank.canSatisfyTimestamp(timestamp)) {\n Internals14.Log.trace({ logLevel, tag: \"@remotion/media\" }, `Keyframe bank exists and satisfies timestamp ${timestamp}`);\n return existingBank;\n }\n Internals14.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);\n existingBank.prepareForDeletion(logLevel, \"already existed but evicted\");\n sources[src] = sources[src].filter((bank) => bank !== existingBank);\n const replacementKeybank = await makeKeyframeBank({\n videoSampleSink,\n initialTimestampRequest: timestamp,\n logLevel,\n src\n });\n addKeyframeBank({ src, bank: replacementKeybank });\n return replacementKeybank;\n };\n const requestKeyframeBank = async ({\n timestamp,\n videoSampleSink,\n src,\n logLevel,\n maxCacheSize,\n fps\n }) => {\n ensureToStayUnderMaxCacheSize(logLevel, maxCacheSize);\n clearKeyframeBanksBeforeTime({\n timestampInSeconds: timestamp,\n src,\n logLevel,\n fps\n });\n const keyframeBank = await getKeyframeBankOrRefetch({\n timestamp,\n videoSampleSink,\n src,\n logLevel\n });\n return keyframeBank;\n };\n const clearAll = (logLevel) => {\n const srcs = Object.keys(sources);\n for (const src of srcs) {\n const banks = sources[src];\n for (const bank of banks) {\n bank.prepareForDeletion(logLevel, \"clearAll\");\n }\n sources[src] = [];\n }\n sources = {};\n };\n let queue = Promise.resolve(undefined);\n return {\n requestKeyframeBank: ({\n timestamp,\n videoSampleSink,\n src,\n logLevel,\n 
maxCacheSize,\n fps\n }) => {\n queue = queue.then(() => requestKeyframeBank({\n timestamp,\n videoSampleSink,\n src,\n logLevel,\n maxCacheSize,\n fps\n }));\n return queue;\n },\n getCacheStats,\n clearAll\n };\n};\n\n// src/caches.ts\nvar getSafeWindowOfMonotonicity = (fps) => 0.2 * 30 / fps;\nvar keyframeManager = makeKeyframeManager();\nvar audioManager = makeAudioManager();\nvar getTotalCacheStats = () => {\n const keyframeManagerCacheStats = keyframeManager.getCacheStats();\n const audioManagerCacheStats = audioManager.getCacheStats();\n return {\n count: keyframeManagerCacheStats.count + audioManagerCacheStats.count,\n totalSize: keyframeManagerCacheStats.totalSize + audioManagerCacheStats.totalSize\n };\n};\nvar getUncachedMaxCacheSize = (logLevel) => {\n if (typeof window !== \"undefined\" && window.remotion_mediaCacheSizeInBytes !== undefined && window.remotion_mediaCacheSizeInBytes !== null) {\n if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {\n cancelRender(new Error(`The minimum value for the \"mediaCacheSizeInBytes\" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));\n }\n if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {\n cancelRender(new Error(`The maximum value for the \"mediaCacheSizeInBytes\" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));\n }\n Internals15.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Using cache size set using \"mediaCacheSizeInBytes\": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);\n return window.remotion_mediaCacheSizeInBytes;\n }\n if (typeof window !== \"undefined\" && window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {\n const value = window.remotion_initialMemoryAvailable / 2;\n if (value < 500 * 1024 * 1024) {\n Internals15.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Using cache size set based on minimum value of 
500MB (which is more than half of the available system memory!)`);\n return 500 * 1024 * 1024;\n }\n if (value > 20000 * 1024 * 1024) {\n Internals15.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);\n return 20000 * 1024 * 1024;\n }\n Internals15.Log.verbose({ logLevel, tag: \"@remotion/media\" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);\n return value;\n }\n return 1000 * 1000 * 1000;\n};\nvar cachedMaxCacheSize = null;\nvar getMaxVideoCacheSize = (logLevel) => {\n if (cachedMaxCacheSize !== null) {\n return cachedMaxCacheSize;\n }\n cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);\n return cachedMaxCacheSize;\n};\nvar useMaxMediaCacheSize = (logLevel) => {\n const context = React2.useContext(Internals15.MaxMediaCacheSizeContext);\n if (context === null) {\n return getMaxVideoCacheSize(logLevel);\n }\n return context;\n};\n\n// src/convert-audiodata/apply-volume.ts\nvar applyVolume = (array, volume) => {\n if (volume === 1) {\n return;\n }\n for (let i = 0;i < array.length; i++) {\n const newValue = array[i] * volume;\n if (newValue < -32768) {\n array[i] = -32768;\n } else if (newValue > 32767) {\n array[i] = 32767;\n } else {\n array[i] = newValue;\n }\n }\n};\n\n// src/convert-audiodata/resample-audiodata.ts\nvar TARGET_NUMBER_OF_CHANNELS = 2;\nvar TARGET_SAMPLE_RATE = 48000;\nvar fixFloatingPoint = (value) => {\n if (value % 1 < 0.0000001) {\n return Math.floor(value);\n }\n if (value % 1 > 0.9999999) {\n return Math.ceil(value);\n }\n return value;\n};\nvar resampleAudioData = ({\n srcNumberOfChannels,\n sourceChannels,\n destination,\n targetFrames,\n chunkSize\n}) => {\n const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {\n const start = fixFloatingPoint(startUnfixed);\n const end = fixFloatingPoint(endUnfixed);\n const startFloor = 
Math.floor(start);\n const startCeil = Math.ceil(start);\n const startFraction = start - startFloor;\n const endFraction = end - Math.floor(end);\n const endFloor = Math.floor(end);\n let weightedSum = 0;\n let totalWeight = 0;\n if (startFraction > 0) {\n const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];\n weightedSum += firstSample * (1 - startFraction);\n totalWeight += 1 - startFraction;\n }\n for (let k = startCeil;k < endFloor; k++) {\n const num = sourceChannels[k * srcNumberOfChannels + channelIndex];\n weightedSum += num;\n totalWeight += 1;\n }\n if (endFraction > 0) {\n const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];\n weightedSum += lastSample * endFraction;\n totalWeight += endFraction;\n }\n const average = weightedSum / totalWeight;\n return average;\n };\n for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {\n const start = newFrameIndex * chunkSize;\n const end = start + chunkSize;\n if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {\n for (let i = 0;i < srcNumberOfChannels; i++) {\n destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);\n }\n }\n if (srcNumberOfChannels === 1) {\n const m = getSourceValues(start, end, 0);\n destination[newFrameIndex * 2 + 0] = m;\n destination[newFrameIndex * 2 + 1] = m;\n } else if (srcNumberOfChannels === 4) {\n const l = getSourceValues(start, end, 0);\n const r = getSourceValues(start, end, 1);\n const sl = getSourceValues(start, end, 2);\n const sr = getSourceValues(start, end, 3);\n const l2 = 0.5 * (l + sl);\n const r2 = 0.5 * (r + sr);\n destination[newFrameIndex * 2 + 0] = l2;\n destination[newFrameIndex * 2 + 1] = r2;\n } else if (srcNumberOfChannels === 6) {\n const l = getSourceValues(start, end, 0);\n const r = getSourceValues(start, end, 1);\n const c = getSourceValues(start, end, 2);\n const sl = getSourceValues(start, end, 3);\n const sr = getSourceValues(start, end, 
4);\n const sq = Math.sqrt(1 / 2);\n const l2 = l + sq * (c + sl);\n const r2 = r + sq * (c + sr);\n destination[newFrameIndex * 2 + 0] = l2;\n destination[newFrameIndex * 2 + 1] = r2;\n } else {\n for (let i = 0;i < srcNumberOfChannels; i++) {\n destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);\n }\n }\n }\n};\n\n// src/looped-frame.ts\nvar frameForVolumeProp = ({\n behavior,\n loop,\n assetDurationInSeconds,\n fps,\n frame,\n startsAt\n}) => {\n if (!loop) {\n return frame + startsAt;\n }\n if (behavior === \"extend\") {\n return frame + startsAt;\n }\n const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;\n return frame % assetDurationInFrames + startsAt;\n};\n\n// src/convert-audiodata/convert-audiodata.ts\nvar FORMAT = \"s16\";\nvar fixFloatingPoint2 = (value) => {\n const decimal = Math.abs(value % 1);\n if (decimal < 0.0000001) {\n return value < 0 ? Math.ceil(value) : Math.floor(value);\n }\n if (decimal > 0.9999999) {\n return value < 0 ? Math.floor(value) : Math.ceil(value);\n }\n return value;\n};\nvar ceilButNotIfFloatingPointIssue = (value) => {\n const fixed = fixFloatingPoint2(value);\n return Math.ceil(fixed);\n};\nvar convertAudioData = ({\n audioData,\n trimStartInSeconds,\n trimEndInSeconds,\n playbackRate,\n audioDataTimestamp,\n isLast\n}) => {\n const {\n numberOfChannels: srcNumberOfChannels,\n sampleRate: currentSampleRate,\n numberOfFrames\n } = audioData;\n const ratio = currentSampleRate / TARGET_SAMPLE_RATE;\n const frameOffset = Math.floor(fixFloatingPoint2(trimStartInSeconds * audioData.sampleRate));\n const unroundedFrameCount = numberOfFrames - trimEndInSeconds * audioData.sampleRate - frameOffset;\n const frameCount = isLast ? ceilButNotIfFloatingPointIssue(unroundedFrameCount) : Math.round(unroundedFrameCount);\n const newNumberOfFrames = isLast ? 
ceilButNotIfFloatingPointIssue(unroundedFrameCount / ratio / playbackRate) : Math.round(unroundedFrameCount / ratio / playbackRate);\n if (newNumberOfFrames === 0) {\n throw new Error(\"Cannot resample - the given sample rate would result in less than 1 sample\");\n }\n const srcChannels = new Int16Array(srcNumberOfChannels * frameCount);\n const isF32 = audioData.format === \"f32\" || audioData.format === \"f32-planar\";\n if (isF32) {\n const bytesPerPlane = frameCount * 4;\n const f32Buffer = new ArrayBuffer(srcNumberOfChannels * bytesPerPlane);\n for (let ch = 0;ch < srcNumberOfChannels; ch++) {\n audioData.copyTo(new Float32Array(f32Buffer, ch * bytesPerPlane, frameCount), { planeIndex: ch, frameOffset, frameCount, format: \"f32-planar\" });\n }\n const f32AudioData = new AudioData({\n format: \"f32-planar\",\n sampleRate: currentSampleRate,\n numberOfFrames: frameCount,\n numberOfChannels: srcNumberOfChannels,\n timestamp: audioData.timestamp,\n data: f32Buffer\n });\n f32AudioData.copyTo(srcChannels, {\n planeIndex: 0,\n format: FORMAT,\n frameOffset: 0,\n frameCount\n });\n f32AudioData.close();\n } else {\n audioData.copyTo(srcChannels, {\n planeIndex: 0,\n format: FORMAT,\n frameOffset,\n frameCount\n });\n }\n const data = new Int16Array(newNumberOfFrames * TARGET_NUMBER_OF_CHANNELS);\n const chunkSize = frameCount / newNumberOfFrames;\n const timestampOffsetMicroseconds = frameOffset / audioData.sampleRate * 1e6;\n if (newNumberOfFrames === frameCount && TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels && playbackRate === 1) {\n return {\n data: srcChannels,\n numberOfFrames: newNumberOfFrames,\n timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),\n durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)\n };\n }\n resampleAudioData({\n srcNumberOfChannels,\n sourceChannels: srcChannels,\n destination: data,\n targetFrames: newNumberOfFrames,\n chunkSize\n });\n const newAudioData = {\n 
data,\n numberOfFrames: newNumberOfFrames,\n timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),\n durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)\n };\n return newAudioData;\n};\n\n// src/convert-audiodata/combine-audiodata.ts\nvar combineAudioDataAndClosePrevious = (audioDataArray) => {\n let numberOfFrames = 0;\n let durationInMicroSeconds = 0;\n const { timestamp } = audioDataArray[0];\n for (const audioData of audioDataArray) {\n numberOfFrames += audioData.numberOfFrames;\n durationInMicroSeconds += audioData.durationInMicroSeconds;\n }\n const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);\n let offset = 0;\n for (const audioData of audioDataArray) {\n arr.set(audioData.data, offset);\n offset += audioData.data.length;\n }\n return {\n data: arr,\n numberOfFrames,\n timestamp: fixFloatingPoint2(timestamp),\n durationInMicroSeconds: fixFloatingPoint2(durationInMicroSeconds)\n };\n};\n\n// src/get-sink.ts\nimport { Internals as Internals16 } from \"remotion\";\n\n// src/video-extraction/get-frames-since-keyframe.ts\nimport {\n ALL_FORMATS as ALL_FORMATS2,\n AudioSampleSink,\n EncodedPacketSink,\n Input as Input2,\n MATROSKA,\n UrlSource as UrlSource2,\n VideoSampleSink,\n WEBM\n} from \"mediabunny\";\n\n// src/browser-can-use-webgl2.ts\nvar browserCanUseWebGl2 = null;\nvar browserCanUseWebGl2Uncached = () => {\n const canvas = new OffscreenCanvas(1, 1);\n const context = canvas.getContext(\"webgl2\");\n return context !== null;\n};\nvar canBrowserUseWebGl2 = () => {\n if (browserCanUseWebGl2 !== null) {\n return browserCanUseWebGl2;\n }\n browserCanUseWebGl2 = browserCanUseWebGl2Uncached();\n return browserCanUseWebGl2;\n};\n\n// src/video-extraction/remember-actual-matroska-timestamps.ts\nvar rememberActualMatroskaTimestamps = (isMatroska) => {\n const observations = [];\n const observeTimestamp = (startTime) => {\n if (!isMatroska) {\n return;\n }\n 
observations.push(startTime);\n };\n const getRealTimestamp = (observedTimestamp) => {\n if (!isMatroska) {\n return observedTimestamp;\n }\n return observations.find((observation) => Math.abs(observedTimestamp - observation) < 0.001) ?? null;\n };\n return {\n observeTimestamp,\n getRealTimestamp\n };\n};\n\n// src/video-extraction/get-frames-since-keyframe.ts\nvar getRetryDelay = () => {\n return null;\n};\nvar getFormatOrNullOrNetworkError = async (input) => {\n try {\n return await input.getFormat();\n } catch (err) {\n if (isNetworkError(err)) {\n return \"network-error\";\n }\n return null;\n }\n};\nvar getSinks = async (src) => {\n const input = new Input2({\n formats: ALL_FORMATS2,\n source: new UrlSource2(src, {\n getRetryDelay\n })\n });\n const format = await getFormatOrNullOrNetworkError(input);\n const isMatroska = format === MATROSKA || format === WEBM;\n const getVideoSinks = async () => {\n if (format === \"network-error\") {\n return \"network-error\";\n }\n if (format === null) {\n return \"unknown-container-format\";\n }\n const videoTrack = await input.getPrimaryVideoTrack();\n if (!videoTrack) {\n return \"no-video-track\";\n }\n const canDecode = await videoTrack.canDecode();\n if (!canDecode) {\n return \"cannot-decode\";\n }\n const sampleSink = new VideoSampleSink(videoTrack);\n const packetSink = new EncodedPacketSink(videoTrack);\n const startPacket = await packetSink.getFirstPacket({\n verifyKeyPackets: true\n });\n const hasAlpha = startPacket?.sideData.alpha;\n if (hasAlpha && !canBrowserUseWebGl2()) {\n return \"cannot-decode-alpha\";\n }\n return {\n sampleSink\n };\n };\n let videoSinksPromise = null;\n const getVideoSinksPromise = () => {\n if (videoSinksPromise) {\n return videoSinksPromise;\n }\n videoSinksPromise = getVideoSinks();\n return videoSinksPromise;\n };\n const audioSinksPromise = {};\n const getAudioSinks = async (index) => {\n if (format === null) {\n return \"unknown-container-format\";\n }\n if (format === 
\"network-error\") {\n return \"network-error\";\n }\n const audioTracks = await input.getAudioTracks();\n const audioTrack = audioTracks[index];\n if (!audioTrack) {\n return \"no-audio-track\";\n }\n const canDecode = await audioTrack.canDecode();\n if (!canDecode) {\n return \"cannot-decode-audio\";\n }\n return {\n sampleSink: new AudioSampleSink(audioTrack)\n };\n };\n const getAudioSinksPromise = (index) => {\n if (audioSinksPromise[index]) {\n return audioSinksPromise[index];\n }\n audioSinksPromise[index] = getAudioSinks(index);\n return audioSinksPromise[index];\n };\n return {\n getVideo: () => getVideoSinksPromise(),\n getAudio: (index) => getAudioSinksPromise(index),\n actualMatroskaTimestamps: rememberActualMatroskaTimestamps(isMatroska),\n isMatroska,\n getDuration: () => {\n return input.computeDuration();\n }\n };\n};\n\n// src/get-sink.ts\nvar sinkPromises = {};\nvar getSink = (src, logLevel) => {\n let promise = sinkPromises[src];\n if (!promise) {\n Internals16.Log.verbose({\n logLevel,\n tag: \"@remotion/media\"\n }, `Sink for ${src} was not found, creating new sink`);\n promise = getSinks(src);\n sinkPromises[src] = promise;\n }\n return promise;\n};\n\n// src/audio-extraction/extract-audio.ts\nvar extractAudioInternal = async ({\n src,\n timeInSeconds: unloopedTimeInSeconds,\n durationInSeconds: durationNotYetApplyingPlaybackRate,\n logLevel,\n loop,\n playbackRate,\n audioStreamIndex,\n trimBefore,\n trimAfter,\n fps,\n maxCacheSize\n}) => {\n const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSink(src, logLevel);\n let mediaDurationInSeconds = null;\n if (loop) {\n mediaDurationInSeconds = await getDuration();\n }\n const audio = await getAudio(audioStreamIndex);\n if (audio === \"network-error\") {\n return \"network-error\";\n }\n if (audio === \"no-audio-track\") {\n return { data: null, durationInSeconds: null };\n }\n if (audio === \"cannot-decode-audio\") {\n return \"cannot-decode\";\n }\n if (audio === 
\"unknown-container-format\") {\n return \"unknown-container-format\";\n }\n const timeInSeconds = getTimeInSeconds({\n loop,\n mediaDurationInSeconds,\n unloopedTimeInSeconds,\n src,\n trimAfter,\n playbackRate,\n trimBefore,\n fps,\n ifNoMediaDuration: \"fail\"\n });\n if (timeInSeconds === null) {\n return { data: null, durationInSeconds: mediaDurationInSeconds };\n }\n try {\n const sampleIterator = await audioManager.getIterator({\n src,\n timeInSeconds,\n audioSampleSink: audio.sampleSink,\n isMatroska,\n actualMatroskaTimestamps,\n logLevel,\n maxCacheSize\n });\n const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;\n const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);\n audioManager.logOpenFrames();\n const audioDataArray = [];\n for (let i = 0;i < samples.length; i++) {\n const sample = samples[i];\n if (Math.abs(sample.timestamp - (timeInSeconds + durationInSeconds)) * sample.sampleRate < 1) {\n continue;\n }\n if (sample.timestamp + sample.duration <= timeInSeconds) {\n continue;\n }\n const isFirstSample = i === 0;\n const isLastSample = i === samples.length - 1;\n const audioDataRaw = sample.toAudioData();\n let trimStartInSeconds = 0;\n let trimEndInSeconds = 0;\n let leadingSilence = null;\n if (isFirstSample) {\n trimStartInSeconds = fixFloatingPoint2(timeInSeconds - sample.timestamp);\n if (trimStartInSeconds < 0) {\n const silenceFrames = Math.ceil(fixFloatingPoint2(-trimStartInSeconds * TARGET_SAMPLE_RATE));\n leadingSilence = {\n data: new Int16Array(silenceFrames * TARGET_NUMBER_OF_CHANNELS),\n numberOfFrames: silenceFrames,\n timestamp: timeInSeconds * 1e6,\n durationInMicroSeconds: silenceFrames / TARGET_SAMPLE_RATE * 1e6\n };\n trimStartInSeconds = 0;\n }\n }\n if (isLastSample) {\n trimEndInSeconds = Math.max(0, sample.timestamp + sample.duration - (timeInSeconds + durationInSeconds));\n }\n const audioData = convertAudioData({\n audioData: audioDataRaw,\n trimStartInSeconds,\n 
trimEndInSeconds,\n playbackRate,\n audioDataTimestamp: sample.timestamp,\n isLast: isLastSample\n });\n audioDataRaw.close();\n if (audioData.numberOfFrames === 0) {\n continue;\n }\n if (leadingSilence) {\n audioDataArray.push(leadingSilence);\n }\n audioDataArray.push(audioData);\n }\n if (audioDataArray.length === 0) {\n return { data: null, durationInSeconds: mediaDurationInSeconds };\n }\n const combined = combineAudioDataAndClosePrevious(audioDataArray);\n return { data: combined, durationInSeconds: mediaDurationInSeconds };\n } catch (err) {\n const error = err;\n if (isNetworkError(error)) {\n return \"network-error\";\n }\n if (isUnsupportedConfigurationError(error)) {\n return \"cannot-decode\";\n }\n throw err;\n }\n};\nvar queue = Promise.resolve(undefined);\nvar extractAudio = (params) => {\n queue = queue.then(() => extractAudioInternal(params));\n return queue;\n};\n\n// src/video-extraction/extract-frame.ts\nimport { Internals as Internals17 } from \"remotion\";\nvar extractFrameInternal = async ({\n src,\n timeInSeconds: unloopedTimeInSeconds,\n logLevel,\n loop,\n trimAfter,\n trimBefore,\n playbackRate,\n fps,\n maxCacheSize\n}) => {\n const sink = await getSink(src, logLevel);\n const [video, mediaDurationInSecondsRaw] = await Promise.all([\n sink.getVideo(),\n loop ? sink.getDuration() : Promise.resolve(null)\n ]);\n const mediaDurationInSeconds = loop ? 
mediaDurationInSecondsRaw : null;\n if (video === \"no-video-track\") {\n throw new Error(`No video track found for ${src}`);\n }\n if (video === \"cannot-decode\") {\n return { type: \"cannot-decode\", durationInSeconds: mediaDurationInSeconds };\n }\n if (video === \"unknown-container-format\") {\n return { type: \"unknown-container-format\" };\n }\n if (video === \"network-error\") {\n return { type: \"network-error\" };\n }\n if (video === \"cannot-decode-alpha\") {\n return {\n type: \"cannot-decode-alpha\",\n durationInSeconds: mediaDurationInSeconds\n };\n }\n const timeInSeconds = getTimeInSeconds({\n loop,\n mediaDurationInSeconds,\n unloopedTimeInSeconds,\n src,\n trimAfter,\n playbackRate,\n trimBefore,\n fps,\n ifNoMediaDuration: \"fail\"\n });\n if (timeInSeconds === null) {\n return {\n type: \"success\",\n frame: null,\n rotation: 0,\n durationInSeconds: await sink.getDuration()\n };\n }\n try {\n const keyframeBank = await keyframeManager.requestKeyframeBank({\n videoSampleSink: video.sampleSink,\n timestamp: timeInSeconds,\n src,\n logLevel,\n maxCacheSize,\n fps\n });\n if (!keyframeBank) {\n return {\n type: \"success\",\n frame: null,\n rotation: 0,\n durationInSeconds: await sink.getDuration()\n };\n }\n const frame = await keyframeBank.getFrameFromTimestamp(timeInSeconds, fps);\n const rotation = frame?.rotation ?? 0;\n return {\n type: \"success\",\n frame: frame?.toVideoFrame() ?? 
null,\n rotation,\n durationInSeconds: await sink.getDuration()\n };\n } catch (err) {\n Internals17.Log.info({ logLevel, tag: \"@remotion/media\" }, `Error decoding ${src} at time ${timeInSeconds}: ${err}`, err);\n return { type: \"cannot-decode\", durationInSeconds: mediaDurationInSeconds };\n }\n};\nvar queue2 = Promise.resolve(undefined);\nvar extractFrame = (params) => {\n queue2 = queue2.then(() => extractFrameInternal(params));\n return queue2;\n};\n\n// src/video-extraction/rotate-frame.ts\nvar rotateFrame = async ({\n frame,\n rotation\n}) => {\n if (rotation === 0) {\n const directBitmap = await createImageBitmap(frame);\n frame.close();\n return directBitmap;\n }\n const width = rotation === 90 || rotation === 270 ? frame.displayHeight : frame.displayWidth;\n const height = rotation === 90 || rotation === 270 ? frame.displayWidth : frame.displayHeight;\n const canvas = new OffscreenCanvas(width, height);\n const ctx = canvas.getContext(\"2d\");\n if (!ctx) {\n throw new Error(\"Could not get 2d context\");\n }\n canvas.width = width;\n canvas.height = height;\n if (rotation === 90) {\n ctx.translate(width, 0);\n } else if (rotation === 180) {\n ctx.translate(width, height);\n } else if (rotation === 270) {\n ctx.translate(0, height);\n }\n ctx.rotate(rotation * (Math.PI / 180));\n ctx.drawImage(frame, 0, 0);\n const bitmap = await createImageBitmap(canvas);\n frame.close();\n return bitmap;\n};\n\n// src/extract-frame-and-audio.ts\nvar extractFrameAndAudio = async ({\n src,\n timeInSeconds,\n logLevel,\n durationInSeconds,\n playbackRate,\n includeAudio,\n includeVideo,\n loop,\n audioStreamIndex,\n trimAfter,\n trimBefore,\n fps,\n maxCacheSize\n}) => {\n try {\n const [video, audio] = await Promise.all([\n includeVideo ? extractFrame({\n src,\n timeInSeconds,\n logLevel,\n loop,\n trimAfter,\n playbackRate,\n trimBefore,\n fps,\n maxCacheSize\n }) : null,\n includeAudio ? 
extractAudio({\n src,\n timeInSeconds,\n durationInSeconds,\n logLevel,\n loop,\n playbackRate,\n audioStreamIndex,\n trimAfter,\n fps,\n trimBefore,\n maxCacheSize\n }) : null\n ]);\n if (video?.type === \"cannot-decode\") {\n return {\n type: \"cannot-decode\",\n durationInSeconds: video.durationInSeconds\n };\n }\n if (video?.type === \"unknown-container-format\") {\n return { type: \"unknown-container-format\" };\n }\n if (video?.type === \"cannot-decode-alpha\") {\n return {\n type: \"cannot-decode-alpha\",\n durationInSeconds: video.durationInSeconds\n };\n }\n if (video?.type === \"network-error\") {\n return { type: \"network-error\" };\n }\n if (audio === \"unknown-container-format\") {\n return { type: \"unknown-container-format\" };\n }\n if (audio === \"network-error\") {\n return { type: \"network-error\" };\n }\n if (audio === \"cannot-decode\") {\n return {\n type: \"cannot-decode\",\n durationInSeconds: video?.type === \"success\" ? video.durationInSeconds : null\n };\n }\n return {\n type: \"success\",\n frame: video?.frame ? await rotateFrame({\n frame: video.frame,\n rotation: video.rotation\n }) : null,\n audio: audio?.data ?? null,\n durationInSeconds: audio?.durationInSeconds ?? 
null\n };\n } catch (err) {\n const error = err;\n if (isNetworkError(error)) {\n return { type: \"network-error\" };\n }\n throw err;\n }\n};\n\n// src/video-extraction/add-broadcast-channel-listener.ts\nvar emitReadiness = (channel) => {\n channel.postMessage({\n type: \"main-tab-ready\"\n });\n setInterval(() => {\n channel.postMessage({\n type: \"main-tab-ready\"\n });\n }, 300);\n};\nvar addBroadcastChannelListener = () => {\n if (!(typeof window !== \"undefined\" && window.remotion_broadcastChannel && window.remotion_isMainTab)) {\n return;\n }\n window.remotion_broadcastChannel.addEventListener(\"message\", async (event) => {\n const data = event.data;\n if (data.type === \"request\") {\n try {\n const result = await extractFrameAndAudio({\n src: data.src,\n timeInSeconds: data.timeInSeconds,\n logLevel: data.logLevel,\n durationInSeconds: data.durationInSeconds,\n playbackRate: data.playbackRate,\n includeAudio: data.includeAudio,\n includeVideo: data.includeVideo,\n loop: data.loop,\n audioStreamIndex: data.audioStreamIndex,\n trimAfter: data.trimAfter,\n trimBefore: data.trimBefore,\n fps: data.fps,\n maxCacheSize: data.maxCacheSize\n });\n if (result.type === \"cannot-decode\") {\n const cannotDecodeResponse = {\n type: \"response-cannot-decode\",\n id: data.id,\n durationInSeconds: result.durationInSeconds\n };\n window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);\n return;\n }\n if (result.type === \"cannot-decode-alpha\") {\n const cannotDecodeAlphaResponse = {\n type: \"response-cannot-decode-alpha\",\n id: data.id,\n durationInSeconds: result.durationInSeconds\n };\n window.remotion_broadcastChannel.postMessage(cannotDecodeAlphaResponse);\n return;\n }\n if (result.type === \"network-error\") {\n const networkErrorResponse = {\n type: \"response-network-error\",\n id: data.id\n };\n window.remotion_broadcastChannel.postMessage(networkErrorResponse);\n return;\n }\n if (result.type === \"unknown-container-format\") {\n const 
unknownContainerFormatResponse = {\n type: \"response-unknown-container-format\",\n id: data.id\n };\n window.remotion_broadcastChannel.postMessage(unknownContainerFormatResponse);\n return;\n }\n const { frame, audio, durationInSeconds } = result;\n const imageBitmap = frame ? await createImageBitmap(frame) : null;\n if (frame) {\n frame.close();\n }\n const response = {\n type: \"response-success\",\n id: data.id,\n frame: imageBitmap,\n audio,\n durationInSeconds: durationInSeconds ?? null\n };\n window.remotion_broadcastChannel.postMessage(response);\n } catch (error) {\n const response = {\n type: \"response-error\",\n id: data.id,\n errorStack: error.stack ?? \"No stack trace\"\n };\n window.remotion_broadcastChannel.postMessage(response);\n }\n } else if (data.type === \"main-tab-ready\") {} else {\n throw new Error(\"Invalid message: \" + JSON.stringify(data));\n }\n });\n emitReadiness(window.remotion_broadcastChannel);\n};\nvar mainTabIsReadyProm = null;\nvar waitForMainTabToBeReady = (channel) => {\n if (mainTabIsReadyProm) {\n return mainTabIsReadyProm;\n }\n mainTabIsReadyProm = new Promise((resolve) => {\n const onMessage = (event) => {\n const data = event.data;\n if (data.type === \"main-tab-ready\") {\n resolve();\n channel.removeEventListener(\"message\", onMessage);\n }\n };\n channel.addEventListener(\"message\", onMessage);\n });\n return mainTabIsReadyProm;\n};\n\n// src/video-extraction/extract-frame-via-broadcast-channel.ts\naddBroadcastChannelListener();\nvar extractFrameViaBroadcastChannel = async ({\n src,\n timeInSeconds,\n logLevel,\n durationInSeconds,\n playbackRate,\n includeAudio,\n includeVideo,\n isClientSideRendering,\n loop,\n audioStreamIndex,\n trimAfter,\n trimBefore,\n fps,\n maxCacheSize\n}) => {\n if (isClientSideRendering || window.remotion_isMainTab) {\n return extractFrameAndAudio({\n logLevel,\n src,\n timeInSeconds,\n durationInSeconds,\n playbackRate,\n includeAudio,\n includeVideo,\n loop,\n audioStreamIndex,\n 
trimAfter,\n trimBefore,\n fps,\n maxCacheSize\n });\n }\n await waitForMainTabToBeReady(window.remotion_broadcastChannel);\n const requestId = crypto.randomUUID();\n const resolvePromise = new Promise((resolve, reject) => {\n const onMessage = (event) => {\n const data = event.data;\n if (!data) {\n return;\n }\n if (data.type === \"main-tab-ready\") {\n return;\n }\n if (data.id !== requestId) {\n return;\n }\n if (data.type === \"response-success\") {\n resolve({\n type: \"success\",\n frame: data.frame ? data.frame : null,\n audio: data.audio ? data.audio : null,\n durationInSeconds: data.durationInSeconds ? data.durationInSeconds : null\n });\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n if (data.type === \"response-error\") {\n reject(data.errorStack);\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n if (data.type === \"response-cannot-decode\") {\n resolve({\n type: \"cannot-decode\",\n durationInSeconds: data.durationInSeconds\n });\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n if (data.type === \"response-network-error\") {\n resolve({ type: \"network-error\" });\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n if (data.type === \"response-unknown-container-format\") {\n resolve({ type: \"unknown-container-format\" });\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n if (data.type === \"response-cannot-decode-alpha\") {\n resolve({\n type: \"cannot-decode-alpha\",\n durationInSeconds: data.durationInSeconds\n });\n window.remotion_broadcastChannel.removeEventListener(\"message\", onMessage);\n return;\n }\n throw new Error(`Invalid message: ${JSON.stringify(data)}`);\n };\n window.remotion_broadcastChannel.addEventListener(\"message\", onMessage);\n });\n const request = {\n type: \"request\",\n src,\n timeInSeconds,\n 
id: requestId,\n logLevel,\n durationInSeconds,\n playbackRate,\n includeAudio,\n includeVideo,\n loop,\n audioStreamIndex,\n trimAfter,\n trimBefore,\n fps,\n maxCacheSize\n };\n window.remotion_broadcastChannel.postMessage(request);\n let timeoutId;\n return Promise.race([\n resolvePromise.then((res) => {\n clearTimeout(timeoutId);\n return res;\n }),\n new Promise((_, reject) => {\n timeoutId = setTimeout(() => {\n reject(new Error(`Timeout while extracting frame at time ${timeInSeconds}sec from ${src}`));\n }, Math.max(3000, window.remotion_puppeteerTimeout - 5000));\n })\n ]);\n};\n\n// src/audio/audio-for-rendering.tsx\nimport { jsx as jsx2 } from \"react/jsx-runtime\";\nvar AudioForRendering = ({\n volume: volumeProp,\n playbackRate,\n src,\n muted,\n loopVolumeCurveBehavior,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n logLevel: overriddenLogLevel,\n loop,\n fallbackHtml5AudioProps,\n audioStreamIndex,\n showInTimeline,\n style,\n name,\n disallowFallbackToHtml5Audio,\n toneFrequency,\n trimAfter,\n trimBefore,\n onError\n}) => {\n const defaultLogLevel = Internals18.useLogLevel();\n const logLevel = overriddenLogLevel ?? 
defaultLogLevel;\n const frame = useCurrentFrame3();\n const absoluteFrame = Internals18.useTimelinePosition();\n const videoConfig = Internals18.useUnsafeVideoConfig();\n const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals18.RenderAssetManager);\n const startsAt = Internals18.useMediaStartsAt();\n const environment = useRemotionEnvironment();\n if (!videoConfig) {\n throw new Error(\"No video config found\");\n }\n if (!src) {\n throw new TypeError(\"No `src` was passed to <Audio>.\");\n }\n const { fps } = videoConfig;\n const { delayRender, continueRender } = useDelayRender();\n const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);\n const sequenceContext = useContext4(Internals18.SequenceContext);\n const id = useMemo3(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [\n src,\n sequenceContext?.cumulatedFrom,\n sequenceContext?.relativeFrom,\n sequenceContext?.durationInFrames\n ]);\n const maxCacheSize = useMaxMediaCacheSize(logLevel);\n const audioEnabled = Internals18.useAudioEnabled();\n useLayoutEffect2(() => {\n const timestamp = frame / fps;\n const durationInSeconds = 1 / fps;\n const shouldRenderAudio = (() => {\n if (!audioEnabled) {\n return false;\n }\n if (muted) {\n return false;\n }\n return true;\n })();\n if (!shouldRenderAudio) {\n return;\n }\n if (replaceWithHtml5Audio) {\n return;\n }\n const newHandle = delayRender(`Extracting audio for frame ${frame}`, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined\n });\n extractFrameViaBroadcastChannel({\n src,\n timeInSeconds: timestamp,\n durationInSeconds,\n playbackRate: playbackRate ?? 1,\n logLevel,\n includeAudio: shouldRenderAudio,\n includeVideo: false,\n isClientSideRendering: environment.isClientSideRendering,\n loop: loop ?? false,\n audioStreamIndex: audioStreamIndex ?? 
0,\n trimAfter,\n trimBefore,\n fps,\n maxCacheSize\n }).then((result) => {\n const handleError = (error, clientSideError, fallbackMessage) => {\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error,\n disallowFallback: disallowFallbackToHtml5Audio ?? false,\n isClientSideRendering: environment.isClientSideRendering,\n clientSideError\n });\n if (action === \"fail\") {\n cancelRender2(errorToUse);\n }\n Internals18.Log.warn({ logLevel, tag: \"@remotion/media\" }, fallbackMessage);\n setReplaceWithHtml5Audio(true);\n };\n if (result.type === \"unknown-container-format\") {\n handleError(new Error(`Unknown container format ${src}.`), new Error(`Cannot render audio \"${src}\": Unknown container format. See supported formats: https://www.remotion.dev/docs/mediabunny/formats`), `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"cannot-decode\") {\n handleError(new Error(`Cannot decode ${src}.`), new Error(`Cannot render audio \"${src}\": The audio could not be decoded by the browser.`), `Cannot decode ${src}, falling back to <Html5Audio>`);\n return;\n }\n if (result.type === \"cannot-decode-alpha\") {\n throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);\n }\n if (result.type === \"network-error\") {\n handleError(new Error(`Network error fetching ${src}.`), new Error(`Cannot render audio \"${src}\": Network error while fetching the audio (possibly CORS).`), `Network error fetching ${src}, falling back to <Html5Audio>`);\n return;\n }\n const { audio, durationInSeconds: assetDurationInSeconds } = result;\n const volumePropsFrame = frameForVolumeProp({\n behavior: loopVolumeCurveBehavior ?? \"repeat\",\n loop: loop ?? false,\n assetDurationInSeconds: assetDurationInSeconds ?? 
0,\n fps,\n frame,\n startsAt\n });\n const volume = Internals18.evaluateVolume({\n volume: volumeProp,\n frame: volumePropsFrame,\n mediaVolume: 1\n });\n Internals18.warnAboutTooHighVolume(volume);\n if (audio && volume > 0) {\n applyVolume(audio.data, volume);\n registerRenderAsset({\n type: \"inline-audio\",\n id,\n audio: environment.isClientSideRendering ? audio.data : Array.from(audio.data),\n frame: absoluteFrame,\n timestamp: audio.timestamp,\n duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,\n toneFrequency: toneFrequency ?? 1\n });\n }\n continueRender(newHandle);\n }).catch((error) => {\n cancelRender2(error);\n });\n return () => {\n continueRender(newHandle);\n unregisterRenderAsset(id);\n };\n }, [\n absoluteFrame,\n continueRender,\n delayRender,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n disallowFallbackToHtml5Audio,\n environment.isClientSideRendering,\n fps,\n frame,\n id,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n playbackRate,\n registerRenderAsset,\n src,\n startsAt,\n unregisterRenderAsset,\n volumeProp,\n audioStreamIndex,\n toneFrequency,\n trimAfter,\n trimBefore,\n replaceWithHtml5Audio,\n maxCacheSize,\n audioEnabled,\n onError\n ]);\n if (replaceWithHtml5Audio) {\n return /* @__PURE__ */ jsx2(Html5Audio, {\n src,\n playbackRate,\n muted,\n loop,\n volume: volumeProp,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n style,\n loopVolumeCurveBehavior,\n audioStreamIndex,\n useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi,\n onError: fallbackHtml5AudioProps?.onError,\n toneFrequency,\n acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds,\n name,\n showInTimeline\n });\n }\n return null;\n};\n\n// src/audio/audio.tsx\nimport { jsx as jsx3 } from \"react/jsx-runtime\";\nvar { validateMediaProps } = Internals19;\nvar audioSchema = {\n volume: {\n type: \"number\",\n min: 0,\n max: 20,\n step: 0.01,\n default: 1,\n description: \"Volume\"\n },\n 
playbackRate: {\n type: \"number\",\n min: 0.1,\n step: 0.01,\n default: 1,\n description: \"Playback Rate\"\n },\n loop: { type: \"boolean\", default: false, description: \"Loop\" }\n};\nvar AudioInner = (props) => {\n const { name, stack, showInTimeline, controls, ...otherProps } = props;\n const environment = useRemotionEnvironment2();\n if (typeof props.src !== \"string\") {\n throw new TypeError(`The \\`<Audio>\\` tag requires a string for \\`src\\`, but got ${JSON.stringify(props.src)} instead.`);\n }\n validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, \"Audio\");\n if (environment.isRendering) {\n return /* @__PURE__ */ jsx3(AudioForRendering, {\n ...otherProps\n });\n }\n return /* @__PURE__ */ jsx3(AudioForPreview, {\n name,\n ...otherProps,\n stack: stack ?? null,\n controls\n });\n};\nvar Audio = Internals19.wrapInSchema(AudioInner, audioSchema);\nInternals19.addSequenceStackTraces(Audio);\n\n// src/video/video.tsx\nimport { Internals as Internals22, useRemotionEnvironment as useRemotionEnvironment4 } from \"remotion\";\n\n// src/video/video-for-preview.tsx\nimport {\n useContext as useContext5,\n useEffect as useEffect3,\n useLayoutEffect as useLayoutEffect3,\n useMemo as useMemo4,\n useRef as useRef2,\n useState as useState4\n} from \"react\";\nimport {\n Html5Video,\n Internals as Internals20,\n useBufferState as useBufferState2,\n useCurrentFrame as useCurrentFrame4,\n useVideoConfig as useVideoConfig3\n} from \"remotion\";\nimport { jsx as jsx4 } from \"react/jsx-runtime\";\nvar {\n useUnsafeVideoConfig: useUnsafeVideoConfig2,\n Timeline: Timeline2,\n SharedAudioContext: SharedAudioContext2,\n useMediaMutedState: useMediaMutedState2,\n useMediaVolumeState: useMediaVolumeState2,\n useFrameForVolumeProp: useFrameForVolumeProp2,\n evaluateVolume: evaluateVolume2,\n warnAboutTooHighVolume: warnAboutTooHighVolume2,\n usePreload: usePreload2,\n SequenceContext: SequenceContext2,\n SequenceVisibilityToggleContext\n} = 
Internals20;\nvar VideoForPreviewAssertedShowing = ({\n src: unpreloadedSrc,\n style,\n playbackRate,\n logLevel,\n className,\n muted,\n volume,\n loopVolumeCurveBehavior,\n onVideoFrame,\n showInTimeline,\n loop,\n name,\n trimAfter,\n trimBefore,\n stack,\n disallowFallbackToOffthreadVideo,\n fallbackOffthreadVideoProps,\n audioStreamIndex,\n debugOverlay,\n debugAudioScheduling,\n headless,\n onError,\n controls\n}) => {\n const src = usePreload2(unpreloadedSrc);\n const canvasRef = useRef2(null);\n const videoConfig = useUnsafeVideoConfig2();\n const frame = useCurrentFrame4();\n const mediaPlayerRef = useRef2(null);\n const initialTrimBeforeRef = useRef2(trimBefore);\n const initialTrimAfterRef = useRef2(trimAfter);\n const initialOnVideoFrameRef = useRef2(onVideoFrame);\n const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);\n const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);\n const [playing] = Timeline2.usePlayingState();\n const timelineContext = Internals20.useTimelineContext();\n const globalPlaybackRate = timelineContext.playbackRate;\n const sharedAudioContext = useContext5(SharedAudioContext2);\n const buffer = useBufferState2();\n const [mediaMuted] = useMediaMutedState2();\n const [mediaVolume] = useMediaVolumeState2();\n const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState4(null);\n const { hidden } = useContext5(SequenceVisibilityToggleContext);\n const volumePropFrame = useFrameForVolumeProp2(loopVolumeCurveBehavior);\n const userPreferredVolume = evaluateVolume2({\n frame: volumePropFrame,\n volume,\n mediaVolume\n });\n if (!videoConfig) {\n throw new Error(\"No video config found\");\n }\n warnAboutTooHighVolume2(userPreferredVolume);\n const parentSequence = useContext5(SequenceContext2);\n const isPremounting = Boolean(parentSequence?.premounting);\n const isPostmounting = Boolean(parentSequence?.postmounting);\n const sequenceOffset = ((parentSequence?.cumulatedFrom ?? 
0) + (parentSequence?.relativeFrom ?? 0)) / videoConfig.fps;\n const loopDisplay = useLoopDisplay({\n loop,\n mediaDurationInSeconds,\n playbackRate,\n trimAfter,\n trimBefore\n });\n const { id: timelineId } = useMediaInTimeline({\n volume,\n mediaType: \"video\",\n src,\n playbackRate,\n displayName: name ?? null,\n stack,\n showInTimeline,\n premountDisplay: parentSequence?.premountDisplay ?? null,\n postmountDisplay: parentSequence?.postmountDisplay ?? null,\n loopDisplay,\n mediaVolume,\n trimAfter,\n trimBefore,\n controls\n });\n const isSequenceHidden = hidden[timelineId] ?? false;\n const currentTime = frame / videoConfig.fps;\n const currentTimeRef = useRef2(currentTime);\n currentTimeRef.current = currentTime;\n const preloadedSrc = usePreload2(src);\n const buffering = useContext5(Internals20.BufferingContextReact);\n if (!buffering) {\n throw new Error(\"useMediaPlayback must be used inside a <BufferingContext>\");\n }\n const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;\n const isPlayerBuffering = Internals20.useIsPlayerBuffering(buffering);\n const initialPlaying = useRef2(playing && !isPlayerBuffering);\n const initialIsPremounting = useRef2(isPremounting);\n const initialIsPostmounting = useRef2(isPostmounting);\n const initialGlobalPlaybackRate = useRef2(globalPlaybackRate);\n const initialPlaybackRate = useRef2(playbackRate);\n const initialMuted = useRef2(effectiveMuted);\n const initialSequenceOffset = useRef2(sequenceOffset);\n useEffect3(() => {\n if (!sharedAudioContext)\n return;\n if (!sharedAudioContext.audioContext)\n return;\n const { audioContext, audioSyncAnchor, scheduleAudioNode } = sharedAudioContext;\n try {\n const player = new MediaPlayer({\n canvas: canvasRef.current,\n src: preloadedSrc,\n logLevel,\n sharedAudioContext: { audioContext, audioSyncAnchor, scheduleAudioNode },\n loop,\n trimAfter: initialTrimAfterRef.current,\n trimBefore: initialTrimBeforeRef.current,\n fps: 
videoConfig.fps,\n playbackRate: initialPlaybackRate.current,\n audioStreamIndex,\n debugOverlay,\n debugAudioScheduling,\n bufferState: buffer,\n isPremounting: initialIsPremounting.current,\n isPostmounting: initialIsPostmounting.current,\n globalPlaybackRate: initialGlobalPlaybackRate.current,\n durationInFrames: videoConfig.durationInFrames,\n onVideoFrameCallback: initialOnVideoFrameRef.current ?? null,\n playing: initialPlaying.current,\n sequenceOffset: initialSequenceOffset.current\n });\n mediaPlayerRef.current = player;\n player.initialize(currentTimeRef.current, initialMuted.current).then((result) => {\n if (result.type === \"disposed\") {\n return;\n }\n const handleError = (error, fallbackMessage) => {\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error,\n disallowFallback: disallowFallbackToOffthreadVideo,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n }\n Internals20.Log.warn({ logLevel, tag: \"@remotion/media\" }, fallbackMessage);\n setShouldFallbackToNativeVideo(true);\n };\n if (result.type === \"unknown-container-format\") {\n handleError(new Error(`Unknown container format ${preloadedSrc}.`), `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);\n return;\n }\n if (result.type === \"network-error\") {\n handleError(new Error(`Network error fetching ${preloadedSrc}.`), `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);\n return;\n }\n if (result.type === \"cannot-decode\") {\n handleError(new Error(`Cannot decode ${preloadedSrc}.`), `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);\n return;\n }\n if (result.type === \"no-tracks\") {\n handleError(new Error(`No video or audio tracks found for ${preloadedSrc}.`), `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);\n return;\n }\n if 
(result.type === \"success\") {\n setMediaPlayerReady(true);\n setMediaDurationInSeconds(result.durationInSeconds);\n }\n }).catch((error) => {\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error,\n disallowFallback: disallowFallbackToOffthreadVideo,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n }\n Internals20.Log.error({ logLevel, tag: \"@remotion/media\" }, \"[VideoForPreview] Failed to initialize MediaPlayer\", errorToUse);\n setShouldFallbackToNativeVideo(true);\n });\n } catch (error) {\n const [action, errorToUse] = callOnErrorAndResolve({\n error,\n onError,\n disallowFallback: disallowFallbackToOffthreadVideo,\n isClientSideRendering: false,\n clientSideError: error\n });\n if (action === \"fail\") {\n throw errorToUse;\n }\n Internals20.Log.error({ logLevel, tag: \"@remotion/media\" }, \"[VideoForPreview] MediaPlayer initialization failed\", errorToUse);\n setShouldFallbackToNativeVideo(true);\n }\n return () => {\n if (mediaPlayerRef.current) {\n Internals20.Log.trace({ logLevel, tag: \"@remotion/media\" }, `[VideoForPreview] Disposing MediaPlayer`);\n mediaPlayerRef.current.dispose();\n mediaPlayerRef.current = null;\n }\n setMediaPlayerReady(false);\n setShouldFallbackToNativeVideo(false);\n };\n }, [\n audioStreamIndex,\n buffer,\n debugOverlay,\n debugAudioScheduling,\n disallowFallbackToOffthreadVideo,\n logLevel,\n loop,\n preloadedSrc,\n sharedAudioContext,\n videoConfig.fps,\n onError,\n videoConfig.durationInFrames\n ]);\n const classNameValue = useMemo4(() => {\n return [Internals20.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals20.truthy).join(\" \");\n }, [className]);\n useCommonEffects({\n mediaPlayerRef,\n mediaPlayerReady,\n currentTimeRef,\n playing,\n isPlayerBuffering,\n frame,\n trimBefore,\n trimAfter,\n effectiveMuted,\n userPreferredVolume,\n playbackRate,\n globalPlaybackRate,\n fps: videoConfig.fps,\n sequenceOffset,\n loop,\n 
debugAudioScheduling,\n durationInFrames: videoConfig.durationInFrames,\n isPremounting,\n isPostmounting,\n currentTime,\n logLevel,\n sharedAudioContext,\n label: \"VideoForPreview\"\n });\n useLayoutEffect3(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setDebugOverlay(debugOverlay);\n }, [debugOverlay, mediaPlayerReady]);\n useLayoutEffect3(() => {\n const mediaPlayer = mediaPlayerRef.current;\n if (!mediaPlayer || !mediaPlayerReady) {\n return;\n }\n mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);\n }, [onVideoFrame, mediaPlayerReady]);\n const actualStyle = useMemo4(() => {\n return {\n ...style,\n opacity: isSequenceHidden ? 0 : style?.opacity ?? 1\n };\n }, [isSequenceHidden, style]);\n if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {\n return /* @__PURE__ */ jsx4(Html5Video, {\n src,\n style: actualStyle,\n className,\n muted,\n volume,\n trimAfter,\n trimBefore,\n playbackRate,\n loopVolumeCurveBehavior,\n name,\n loop,\n showInTimeline,\n stack: stack ?? 
undefined,\n ...fallbackOffthreadVideoProps\n });\n }\n if (headless) {\n return null;\n }\n return /* @__PURE__ */ jsx4(\"canvas\", {\n ref: canvasRef,\n style: actualStyle,\n className: classNameValue\n });\n};\nvar VideoForPreview = (props) => {\n const frame = useCurrentFrame4();\n const videoConfig = useVideoConfig3();\n const currentTime = frame / videoConfig.fps;\n const showShow = useMemo4(() => {\n return getTimeInSeconds({\n unloopedTimeInSeconds: currentTime,\n playbackRate: props.playbackRate,\n loop: props.loop,\n trimBefore: props.trimBefore,\n trimAfter: props.trimAfter,\n mediaDurationInSeconds: Infinity,\n fps: videoConfig.fps,\n ifNoMediaDuration: \"infinity\",\n src: props.src\n }) !== null;\n }, [\n currentTime,\n props.loop,\n props.playbackRate,\n props.src,\n videoConfig.fps,\n props.trimBefore,\n props.trimAfter\n ]);\n if (!showShow) {\n return null;\n }\n return /* @__PURE__ */ jsx4(VideoForPreviewAssertedShowing, {\n ...props,\n controls: props.controls\n });\n};\n\n// src/video/video-for-rendering.tsx\nimport {\n useContext as useContext6,\n useLayoutEffect as useLayoutEffect4,\n useMemo as useMemo5,\n useRef as useRef3,\n useState as useState5\n} from \"react\";\nimport {\n Internals as Internals21,\n Loop,\n random as random2,\n useCurrentFrame as useCurrentFrame5,\n useDelayRender as useDelayRender2,\n useRemotionEnvironment as useRemotionEnvironment3,\n useVideoConfig as useVideoConfig4\n} from \"remotion\";\nimport { jsx as jsx5 } from \"react/jsx-runtime\";\nvar VideoForRendering = ({\n volume: volumeProp,\n playbackRate,\n src,\n muted,\n loopVolumeCurveBehavior,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n onVideoFrame,\n logLevel,\n loop,\n style,\n className,\n fallbackOffthreadVideoProps,\n audioStreamIndex,\n name,\n disallowFallbackToOffthreadVideo,\n stack,\n toneFrequency,\n trimAfterValue,\n trimBeforeValue,\n headless,\n onError\n}) => {\n if (!src) {\n throw new TypeError(\"No `src` was passed to 
<Video>.\");\n }\n const frame = useCurrentFrame5();\n const absoluteFrame = Internals21.useTimelinePosition();\n const { fps } = useVideoConfig4();\n const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals21.RenderAssetManager);\n const startsAt = Internals21.useMediaStartsAt();\n const sequenceContext = useContext6(Internals21.SequenceContext);\n const id = useMemo5(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [\n src,\n sequenceContext?.cumulatedFrom,\n sequenceContext?.relativeFrom,\n sequenceContext?.durationInFrames\n ]);\n const environment = useRemotionEnvironment3();\n const { delayRender, continueRender, cancelRender: cancelRender3 } = useDelayRender2();\n const canvasRef = useRef3(null);\n const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);\n const audioEnabled = Internals21.useAudioEnabled();\n const videoEnabled = Internals21.useVideoEnabled();\n const maxCacheSize = useMaxMediaCacheSize(logLevel);\n const [error, setError] = useState5(null);\n if (error) {\n throw error;\n }\n useLayoutEffect4(() => {\n if (!canvasRef.current && !headless) {\n return;\n }\n if (replaceWithOffthreadVideo) {\n return;\n }\n if (!canvasRef.current?.getContext && !headless) {\n return setError(new Error(\"Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag.\"));\n }\n const timestamp = frame / fps;\n const durationInSeconds = 1 / fps;\n const newHandle = delayRender(`Extracting frame at time ${timestamp} from ${src}`, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? 
undefined\n });\n const shouldRenderAudio = (() => {\n if (!audioEnabled) {\n return false;\n }\n if (muted) {\n return false;\n }\n return true;\n })();\n extractFrameViaBroadcastChannel({\n src,\n timeInSeconds: timestamp,\n durationInSeconds,\n playbackRate,\n logLevel,\n includeAudio: shouldRenderAudio,\n includeVideo: videoEnabled,\n isClientSideRendering: environment.isClientSideRendering,\n loop,\n audioStreamIndex,\n trimAfter: trimAfterValue,\n trimBefore: trimBeforeValue,\n fps,\n maxCacheSize\n }).then((result) => {\n const handleError = (err, clientSideError, fallbackMessage, mediaDurationInSeconds) => {\n if (environment.isClientSideRendering) {\n cancelRender3(clientSideError);\n return;\n }\n const [action, errorToUse] = callOnErrorAndResolve({\n onError,\n error: err,\n disallowFallback: disallowFallbackToOffthreadVideo,\n isClientSideRendering: environment.isClientSideRendering,\n clientSideError: err\n });\n if (action === \"fail\") {\n cancelRender3(errorToUse);\n return;\n }\n if (window.remotion_isMainTab) {\n Internals21.Log.warn({ logLevel, tag: \"@remotion/media\" }, fallbackMessage);\n }\n setReplaceWithOffthreadVideo({\n durationInSeconds: mediaDurationInSeconds\n });\n };\n if (result.type === \"unknown-container-format\") {\n handleError(new Error(`Unknown container format ${src}.`), new Error(`Cannot render video \"${src}\": Unknown container format. 
See supported formats: https://www.remotion.dev/docs/mediabunny/formats`), `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`, null);\n return;\n }\n if (result.type === \"cannot-decode\") {\n handleError(new Error(`Cannot decode ${src}.`), new Error(`Cannot render video \"${src}\": The video could not be decoded by the browser.`), `Cannot decode ${src}, falling back to <OffthreadVideo>`, result.durationInSeconds);\n return;\n }\n if (result.type === \"cannot-decode-alpha\") {\n handleError(new Error(`Cannot decode alpha component for ${src}.`), new Error(`Cannot render video \"${src}\": The alpha channel could not be decoded by the browser.`), `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`, result.durationInSeconds);\n return;\n }\n if (result.type === \"network-error\") {\n handleError(new Error(`Network error fetching ${src}.`), new Error(`Cannot render video \"${src}\": Network error while fetching the video (possibly CORS).`), `Network error fetching ${src} (no CORS?), falling back to <OffthreadVideo>`, null);\n return;\n }\n const {\n frame: imageBitmap,\n audio,\n durationInSeconds: assetDurationInSeconds\n } = result;\n if (imageBitmap) {\n onVideoFrame?.(imageBitmap);\n const context = canvasRef.current?.getContext(\"2d\", {\n alpha: true\n });\n if (context) {\n context.canvas.width = imageBitmap.width;\n context.canvas.height = imageBitmap.height;\n context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;\n context.drawImage(imageBitmap, 0, 0);\n }\n imageBitmap.close();\n } else if (videoEnabled) {\n const context = canvasRef.current?.getContext(\"2d\", {\n alpha: true\n });\n if (context) {\n context.clearRect(0, 0, context.canvas.width, context.canvas.height);\n }\n }\n const volumePropsFrame = frameForVolumeProp({\n behavior: loopVolumeCurveBehavior,\n loop,\n assetDurationInSeconds: 
assetDurationInSeconds ?? 0,\n fps,\n frame,\n startsAt\n });\n const volume = Internals21.evaluateVolume({\n volume: volumeProp,\n frame: volumePropsFrame,\n mediaVolume: 1\n });\n Internals21.warnAboutTooHighVolume(volume);\n if (audio && volume > 0) {\n applyVolume(audio.data, volume);\n registerRenderAsset({\n type: \"inline-audio\",\n id,\n audio: environment.isClientSideRendering ? audio.data : Array.from(audio.data),\n frame: absoluteFrame,\n timestamp: audio.timestamp,\n duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,\n toneFrequency\n });\n }\n continueRender(newHandle);\n }).catch((err) => {\n cancelRender3(err);\n });\n return () => {\n continueRender(newHandle);\n unregisterRenderAsset(id);\n };\n }, [\n absoluteFrame,\n continueRender,\n delayRender,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n environment.isClientSideRendering,\n fps,\n frame,\n id,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n onVideoFrame,\n playbackRate,\n registerRenderAsset,\n src,\n startsAt,\n unregisterRenderAsset,\n volumeProp,\n replaceWithOffthreadVideo,\n audioStreamIndex,\n disallowFallbackToOffthreadVideo,\n toneFrequency,\n trimAfterValue,\n trimBeforeValue,\n audioEnabled,\n videoEnabled,\n maxCacheSize,\n cancelRender3,\n headless,\n onError\n ]);\n const classNameValue = useMemo5(() => {\n return [Internals21.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals21.truthy).join(\" \");\n }, [className]);\n if (replaceWithOffthreadVideo) {\n const fallback = /* @__PURE__ */ jsx5(Internals21.InnerOffthreadVideo, {\n src,\n playbackRate: playbackRate ?? 1,\n muted: muted ?? false,\n acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds,\n loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? \"repeat\",\n delayRenderRetries: delayRenderRetries ?? undefined,\n delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? 
undefined,\n style,\n allowAmplificationDuringRender: true,\n transparent: fallbackOffthreadVideoProps?.transparent ?? true,\n toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true,\n audioStreamIndex: audioStreamIndex ?? 0,\n name,\n className,\n onVideoFrame,\n volume: volumeProp,\n id,\n onError: fallbackOffthreadVideoProps?.onError,\n toneFrequency,\n showInTimeline: false,\n crossOrigin: fallbackOffthreadVideoProps?.crossOrigin,\n onAutoPlayError: fallbackOffthreadVideoProps?.onAutoPlayError ?? null,\n pauseWhenBuffering: fallbackOffthreadVideoProps?.pauseWhenBuffering ?? false,\n trimAfter: trimAfterValue,\n trimBefore: trimBeforeValue,\n useWebAudioApi: fallbackOffthreadVideoProps?.useWebAudioApi ?? false,\n startFrom: undefined,\n endAt: undefined,\n stack,\n _remotionInternalNativeLoopPassed: false\n });\n if (loop) {\n if (!replaceWithOffthreadVideo.durationInSeconds) {\n const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. 
Also, \"loop\" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);\n cancelRender3(err);\n throw err;\n }\n return /* @__PURE__ */ jsx5(Loop, {\n layout: \"none\",\n durationInFrames: Internals21.calculateMediaDuration({\n trimAfter: trimAfterValue,\n mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,\n playbackRate,\n trimBefore: trimBeforeValue\n }),\n children: fallback\n });\n }\n return fallback;\n }\n if (headless) {\n return null;\n }\n return /* @__PURE__ */ jsx5(\"canvas\", {\n ref: canvasRef,\n style,\n className: classNameValue\n });\n};\n\n// src/video/video.tsx\nimport { jsx as jsx6 } from \"react/jsx-runtime\";\nvar { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals22;\nvar videoSchema = {\n volume: {\n type: \"number\",\n min: 0,\n max: 20,\n step: 0.01,\n default: 1,\n description: \"Volume\"\n },\n playbackRate: {\n type: \"number\",\n min: 0.1,\n step: 0.01,\n default: 1,\n description: \"Playback Rate\"\n },\n loop: { type: \"boolean\", default: false, description: \"Loop\" },\n \"style.translate\": {\n type: \"translate\",\n step: 1,\n default: \"0px 0px\",\n description: \"Position\"\n },\n \"style.scale\": {\n type: \"number\",\n min: 0.05,\n max: 100,\n step: 0.01,\n default: 1,\n description: \"Scale\"\n },\n \"style.rotate\": {\n type: \"rotation\",\n step: 1,\n default: \"0deg\",\n description: \"Rotation\"\n },\n \"style.opacity\": {\n type: \"number\",\n min: 0,\n max: 1,\n step: 0.01,\n default: 1,\n description: \"Opacity\"\n }\n};\nvar InnerVideo = ({\n src,\n audioStreamIndex,\n className,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n disallowFallbackToOffthreadVideo,\n fallbackOffthreadVideoProps,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n name,\n onVideoFrame,\n playbackRate,\n style,\n trimAfter,\n trimBefore,\n volume,\n stack,\n toneFrequency,\n 
showInTimeline,\n debugOverlay,\n debugAudioScheduling,\n headless,\n onError,\n controls\n}) => {\n const environment = useRemotionEnvironment4();\n if (typeof src !== \"string\") {\n throw new TypeError(`The \\`<Video>\\` tag requires a string for \\`src\\`, but got ${JSON.stringify(src)} instead.`);\n }\n validateMediaTrimProps({\n startFrom: undefined,\n endAt: undefined,\n trimBefore,\n trimAfter\n });\n const { trimBeforeValue, trimAfterValue } = resolveTrimProps({\n startFrom: undefined,\n endAt: undefined,\n trimBefore,\n trimAfter\n });\n validateMediaProps2({ playbackRate, volume }, \"Video\");\n if (environment.isRendering) {\n return /* @__PURE__ */ jsx6(VideoForRendering, {\n audioStreamIndex: audioStreamIndex ?? 0,\n className,\n delayRenderRetries: delayRenderRetries ?? null,\n delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,\n disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,\n name,\n fallbackOffthreadVideoProps,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n onVideoFrame,\n playbackRate,\n src,\n stack,\n style,\n volume,\n toneFrequency,\n trimAfterValue,\n trimBeforeValue,\n headless,\n onError\n });\n }\n return /* @__PURE__ */ jsx6(VideoForPreview, {\n audioStreamIndex: audioStreamIndex ?? 0,\n className,\n name,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n onVideoFrame,\n playbackRate,\n src,\n style,\n volume,\n showInTimeline,\n trimAfter: trimAfterValue,\n trimBefore: trimBeforeValue,\n stack: stack ?? null,\n disallowFallbackToOffthreadVideo,\n fallbackOffthreadVideoProps,\n debugOverlay: debugOverlay ?? false,\n debugAudioScheduling: debugAudioScheduling ?? false,\n headless: headless ?? 
false,\n onError,\n controls\n });\n};\nvar VideoInner = ({\n src,\n audioStreamIndex,\n className,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n disallowFallbackToOffthreadVideo,\n fallbackOffthreadVideoProps,\n logLevel,\n loop,\n loopVolumeCurveBehavior,\n muted,\n name,\n onVideoFrame,\n playbackRate,\n showInTimeline,\n style,\n trimAfter,\n trimBefore,\n volume,\n stack,\n toneFrequency,\n debugOverlay,\n debugAudioScheduling,\n headless,\n onError,\n controls\n}) => {\n const fallbackLogLevel = Internals22.useLogLevel();\n return /* @__PURE__ */ jsx6(InnerVideo, {\n audioStreamIndex: audioStreamIndex ?? 0,\n className,\n delayRenderRetries: delayRenderRetries ?? null,\n delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,\n disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,\n fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {},\n logLevel: logLevel ?? fallbackLogLevel,\n loop: loop ?? false,\n loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? \"repeat\",\n muted: muted ?? false,\n name,\n onVideoFrame,\n playbackRate: playbackRate ?? 1,\n showInTimeline: showInTimeline ?? true,\n src,\n style: style ?? {},\n trimAfter,\n trimBefore,\n volume: volume ?? 1,\n toneFrequency: toneFrequency ?? 1,\n stack,\n debugOverlay: debugOverlay ?? false,\n debugAudioScheduling: debugAudioScheduling ?? false,\n headless: headless ?? 
false,\n onError,\n controls\n });\n};\nvar Video = Internals22.wrapInSchema(VideoInner, videoSchema);\nInternals22.addSequenceStackTraces(Video);\n\n// src/index.ts\nvar experimental_Audio = Audio;\nvar experimental_Video = Video;\nexport {\n experimental_Video,\n experimental_Audio,\n Video,\n AudioForPreview,\n Audio\n};\n","import React from \"react\";\nimport { interpolate, useCurrentFrame, useVideoConfig } from \"remotion\";\nimport type { TerminalTab } from \"../Root\";\n\n// Method color coding\nconst METHOD_COLORS: Record<string, string> = {\n GET: \"#A6E3A1\",\n POST: \"#F9E2AF\",\n PUT: \"#89B4FA\",\n PATCH: \"#CBA6F7\",\n DELETE: \"#F38BA8\",\n};\n\ntype TerminalWindowDarkProps = {\n children?: React.ReactNode;\n title?: string;\n opacity?: number;\n scale?: number;\n width?: number | string;\n height?: number | string;\n tabs?: TerminalTab[];\n /** Frames each tab stays visible */\n tabDurationFrames?: number;\n /** Render function receiving active tab index — used instead of children when tabs provided */\n renderTab?: (activeIndex: number) => React.ReactNode;\n};\n\nexport const TerminalWindowDark: React.FC<TerminalWindowDarkProps> = ({\n children,\n title = \"Terminal\",\n opacity = 1,\n scale = 1,\n width = \"100%\",\n height = \"100%\",\n tabs,\n tabDurationFrames = 100,\n renderTab,\n}) => {\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n const hasTabs = tabs && tabs.length > 0;\n\n // Auto-cycle through tabs based on frame\n let activeTabIndex = 0;\n if (hasTabs && tabs.length > 1) {\n activeTabIndex = Math.floor(frame / tabDurationFrames) % tabs.length;\n }\n\n // Fade transition between tabs\n const tabLocalFrame = frame % tabDurationFrames;\n const fadeIn = interpolate(tabLocalFrame, [0, 8], [0, 1], {\n extrapolateRight: \"clamp\",\n });\n const fadeOut = interpolate(\n tabLocalFrame,\n [tabDurationFrames - 8, tabDurationFrames],\n [1, 0],\n { extrapolateLeft: \"clamp\", extrapolateRight: \"clamp\" }\n );\n const 
tabOpacity = hasTabs && tabs.length > 1 ? Math.min(fadeIn, fadeOut) : 1;\n\n return (\n <div\n style={{\n width,\n height,\n opacity,\n transform: `scale(${scale})`,\n borderRadius: 14,\n overflow: \"hidden\",\n backgroundColor: \"#1E1E2E\",\n border: \"1px solid #313244\",\n boxShadow: \"0 8px 32px rgba(0,0,0,0.4)\",\n display: \"flex\",\n flexDirection: \"column\",\n }}\n >\n {/* Title bar */}\n <div\n style={{\n backgroundColor: \"#181825\",\n borderBottom: \"1px solid #313244\",\n display: \"flex\",\n flexDirection: \"column\",\n flexShrink: 0,\n }}\n >\n {/* Traffic lights + title row */}\n <div\n style={{\n height: 40,\n display: \"flex\",\n alignItems: \"center\",\n padding: \"0 14px\",\n gap: 8,\n }}\n >\n {/* Traffic lights */}\n <div style={{ display: \"flex\", gap: 7 }}>\n <div\n style={{\n width: 12,\n height: 12,\n borderRadius: \"50%\",\n backgroundColor: \"#F38BA8\",\n }}\n />\n <div\n style={{\n width: 12,\n height: 12,\n borderRadius: \"50%\",\n backgroundColor: \"#F9E2AF\",\n }}\n />\n <div\n style={{\n width: 12,\n height: 12,\n borderRadius: \"50%\",\n backgroundColor: \"#A6E3A1\",\n }}\n />\n </div>\n <div\n style={{\n flex: 1,\n textAlign: \"center\",\n fontSize: 13,\n fontWeight: 500,\n color: \"#6C7086\",\n fontFamily: \"system-ui, -apple-system, sans-serif\",\n }}\n >\n {hasTabs ? 
(tabs[activeTabIndex]?.label || title) : title}\n </div>\n {/* Spacer to balance the dots */}\n <div style={{ width: 55 }} />\n </div>\n\n {/* Tab bar (only if multiple tabs) */}\n {hasTabs && tabs.length > 1 && (\n <div\n style={{\n display: \"flex\",\n gap: 2,\n padding: \"0 14px 6px\",\n overflowX: \"hidden\",\n flexWrap: \"nowrap\",\n }}\n >\n {tabs.map((tab, idx) => {\n const isActive = idx === activeTabIndex;\n const method = tab.method.toUpperCase();\n const methodColor = METHOD_COLORS[method] || \"#CDD6F4\";\n return (\n <div\n key={idx}\n style={{\n display: \"flex\",\n alignItems: \"center\",\n gap: 4,\n padding: \"3px 8px\",\n borderRadius: 6,\n fontSize: 10,\n fontWeight: 600,\n fontFamily: \"monospace\",\n backgroundColor: isActive\n ? \"rgba(205,214,244,0.1)\"\n : \"transparent\",\n opacity: isActive ? 1 : 0.4,\n transition: \"none\",\n whiteSpace: \"nowrap\",\n flexShrink: 0,\n }}\n >\n <span style={{ color: methodColor }}>{method}</span>\n <span style={{ color: \"#6C7086\" }}>\n {tab.route.length > 18\n ? tab.route.slice(0, 18) + \"…\"\n : tab.route}\n </span>\n </div>\n );\n })}\n </div>\n )}\n </div>\n\n {/* Content area */}\n <div\n style={{\n flex: 1,\n padding: \"24px 28px\",\n overflow: \"hidden\",\n opacity: tabOpacity,\n }}\n >\n {renderTab ? 
renderTab(activeTabIndex) : children}\n </div>\n </div>\n );\n};\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? [style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? 
Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? baseLabel : `${baseLabel}. This might be caused by loading too many font variations. Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). 
Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/FiraCode.ts\nvar getInfo = () => ({\n fontFamily: \"Fira Code\",\n importName: \"FiraCode\",\n version: \"v27\",\n url: \"https://fonts.googleapis.com/css2?family=Fira+Code:ital,wght@0,300;0,400;0,500;0,600;0,700\",\n unicodeRanges: {\n \"cyrillic-ext\": \"U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F\",\n cyrillic: \"U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116\",\n \"greek-ext\": \"U+1F00-1FFF\",\n greek: \"U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF\",\n symbols2: \"U+2000-2001, U+2004-2008, U+200A, U+23B8-23BD, U+2500-259F\",\n \"latin-ext\": \"U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF\",\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n normal: {\n \"300\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0NSDulI.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh2dSDulI.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0dSDulI.woff2\",\n greek: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3tSDulI.woff2\",\n symbols2: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bhZ_Wmh2uX.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh09SDulI.woff2\",\n latin: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3dSD.woff2\"\n },\n \"400\": 
{\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0NSDulI.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh2dSDulI.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0dSDulI.woff2\",\n greek: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3tSDulI.woff2\",\n symbols2: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bhZ_Wmh2uX.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh09SDulI.woff2\",\n latin: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3dSD.woff2\"\n },\n \"500\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0NSDulI.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh2dSDulI.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0dSDulI.woff2\",\n greek: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3tSDulI.woff2\",\n symbols2: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bhZ_Wmh2uX.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh09SDulI.woff2\",\n latin: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3dSD.woff2\"\n },\n \"600\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0NSDulI.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh2dSDulI.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0dSDulI.woff2\",\n greek: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3tSDulI.woff2\",\n symbols2: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bhZ_Wmh2uX.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh09SDulI.woff2\",\n latin: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3dSD.woff2\"\n 
},\n \"700\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0NSDulI.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh2dSDulI.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh0dSDulI.woff2\",\n greek: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3tSDulI.woff2\",\n symbols2: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bhZ_Wmh2uX.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh09SDulI.woff2\",\n latin: \"https://fonts.gstatic.com/s/firacode/v27/uU9NCBsR6Z2vfE9aq3bh3dSD.woff2\"\n }\n }\n },\n subsets: [\n \"cyrillic\",\n \"cyrillic-ext\",\n \"greek\",\n \"greek-ext\",\n \"latin\",\n \"latin-ext\",\n \"symbols2\"\n ]\n});\nvar fontFamily = \"Fira Code\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","import React from \"react\";\nimport { interpolate, useCurrentFrame } from \"remotion\";\nimport { loadFont } from \"@remotion/google-fonts/FiraCode\";\n\nconst { fontFamily: firaCode } = loadFont();\n\ntype CodeTypewriterProps = {\n code: string;\n charFrames?: number;\n delay?: number;\n fontSize?: number;\n tokenColors?: Partial<Record<TokenType, string>>;\n};\n\ntype TokenType = \"keyword\" | \"string\" | \"comment\" | \"decorator\" | \"function\" | \"number\" | \"normal\";\n\nconst DARK_TOKEN_COLORS: Record<TokenType, string> = {\n keyword: \"#89B4FA\",\n string: \"#A6E3A1\",\n comment: \"#6C7086\",\n decorator: \"#CBA6F7\",\n function: \"#F5C2E7\",\n number: \"#F9E2AF\",\n normal: \"#CDD6F4\",\n};\n\ntype Token = { text: string; type: TokenType };\n\nconst KEYWORDS = new Set([\n \"from\", \"import\", \"def\", \"async\", \"return\", \"if\", \"else\", \"class\", \"True\", \"False\", \"None\",\n \"export\", \"const\", \"function\", \"let\", \"var\", \"await\", \"new\", \"throw\", \"try\", 
\"catch\",\n \"null\", \"true\", \"false\", \"method\", \"status\", \"url\",\n]);\n\nconst tokenizeLine = (line: string): Token[] => {\n const tokens: Token[] = [];\n let i = 0;\n\n // Comment (// or #)\n const trimmed = line.trimStart();\n if (trimmed.startsWith(\"#\") || trimmed.startsWith(\"//\")) {\n return [{ text: line, type: \"comment\" }];\n }\n\n // Decorator\n if (trimmed.startsWith(\"@\")) {\n return [{ text: line, type: \"decorator\" }];\n }\n\n while (i < line.length) {\n // Whitespace\n if (line[i] === \" \" || line[i] === \"\\t\") {\n let end = i;\n while (end < line.length && (line[end] === \" \" || line[end] === \"\\t\")) end++;\n tokens.push({ text: line.slice(i, end), type: \"normal\" });\n i = end;\n continue;\n }\n\n // Strings (single or double quotes)\n if (line[i] === '\"' || line[i] === \"'\") {\n const quote = line[i];\n let end = i + 1;\n while (end < line.length && line[end] !== quote) end++;\n end = Math.min(end + 1, line.length);\n tokens.push({ text: line.slice(i, end), type: \"string\" });\n i = end;\n continue;\n }\n\n // Numbers\n if (/[0-9]/.test(line[i])) {\n let end = i;\n while (end < line.length && /[0-9.]/.test(line[end])) end++;\n tokens.push({ text: line.slice(i, end), type: \"number\" });\n i = end;\n continue;\n }\n\n // Words (keywords, functions, identifiers)\n if (/[a-zA-Z_]/.test(line[i])) {\n let end = i;\n while (end < line.length && /[a-zA-Z_0-9]/.test(line[end])) end++;\n\n const word = line.slice(i, end);\n if (KEYWORDS.has(word)) {\n tokens.push({ text: word, type: \"keyword\" });\n } else if (end < line.length && line[end] === \"(\") {\n tokens.push({ text: word, type: \"function\" });\n } else {\n tokens.push({ text: word, type: \"normal\" });\n }\n i = end;\n continue;\n }\n\n // Other characters (operators, parens, braces, etc.)\n tokens.push({ text: line[i], type: \"normal\" });\n i++;\n }\n\n return tokens;\n};\n\nconst CURSOR_BLINK_FRAMES = 16;\n\nexport const CodeTypewriter: React.FC<CodeTypewriterProps> 
= ({\n code,\n charFrames = 2,\n delay = 0,\n fontSize = 15,\n tokenColors,\n}) => {\n const frame = useCurrentFrame();\n const localFrame = Math.max(0, frame - delay);\n\n const colors = tokenColors\n ? { ...DARK_TOKEN_COLORS, ...tokenColors }\n : DARK_TOKEN_COLORS;\n\n const totalChars = code.length;\n const typedCount = Math.min(totalChars, Math.floor(localFrame / charFrames));\n\n const typedCode = code.slice(0, typedCount);\n const lines = typedCode.split(\"\\n\");\n\n // Cursor blink\n const cursorOpacity = interpolate(\n frame % CURSOR_BLINK_FRAMES,\n [0, CURSOR_BLINK_FRAMES / 2, CURSOR_BLINK_FRAMES],\n [1, 0, 1],\n { extrapolateLeft: \"clamp\", extrapolateRight: \"clamp\" },\n );\n\n return (\n <div\n style={{\n fontFamily: firaCode,\n fontSize,\n lineHeight: 1.7,\n whiteSpace: \"pre\",\n color: colors.normal,\n }}\n >\n {lines.map((line, lineIdx) => {\n const tokens = tokenizeLine(line);\n const isLastLine = lineIdx === lines.length - 1;\n\n return (\n <div key={lineIdx} style={{ minHeight: fontSize * 1.7 }}>\n {tokens.map((token, tIdx) => (\n <span key={tIdx} style={{ color: colors[token.type] }}>\n {token.text}\n </span>\n ))}\n {isLastLine && typedCount < totalChars && (\n <span\n style={{\n opacity: cursorOpacity,\n color: colors.keyword,\n fontWeight: 700,\n }}\n >\n |\n </span>\n )}\n </div>\n );\n })}\n </div>\n );\n};\n","export const SPRING_CONFIGS = {\n smooth: { damping: 200 },\n snappy: { damping: 20, stiffness: 200 },\n bouncy: { damping: 12 },\n heavy: { damping: 15, stiffness: 80, mass: 2 },\n} as const;\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n 
const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? [style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? baseLabel : `${baseLabel}. This might be caused by loading too many font variations. 
Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). 
Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/Inter.ts\nvar getInfo = () => ({\n fontFamily: \"Inter\",\n importName: \"Inter\",\n version: \"v20\",\n url: \"https://fonts.googleapis.com/css2?family=Inter:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;0,800;0,900;1,100;1,200;1,300;1,400;1,500;1,600;1,700;1,800;1,900\",\n unicodeRanges: {\n \"cyrillic-ext\": \"U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F\",\n cyrillic: \"U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116\",\n \"greek-ext\": \"U+1F00-1FFF\",\n greek: \"U+0370-0377, U+037A-037F, U+0384-038A, U+038C, U+038E-03A1, U+03A3-03FF\",\n vietnamese: \"U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB\",\n \"latin-ext\": \"U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF\",\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n italic: {\n \"100\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: 
\"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"200\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"300\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"400\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: 
\"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"500\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"600\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": 
\"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"700\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"800\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n },\n \"900\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L0UUMJng.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L9UUMJng.woff2\",\n \"greek-ext\": 
\"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L1UUMJng.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L6UUMJng.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L2UUMJng.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L3UUMJng.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC53FwrK3iLTcvneQg7Ca725JhhKnNqk6L5UUM.woff2\"\n }\n },\n normal: {\n \"100\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"200\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n 
\"300\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"400\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"500\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n 
\"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"600\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"700\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"800\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": 
\"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n },\n \"900\": {\n \"cyrillic-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2JL7SUc.woff2\",\n cyrillic: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa0ZL7SUc.woff2\",\n \"greek-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2\",\n greek: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2\",\n vietnamese: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2\",\n latin: \"https://fonts.gstatic.com/s/inter/v20/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2\"\n }\n }\n },\n subsets: [\n \"cyrillic\",\n \"cyrillic-ext\",\n \"greek\",\n \"greek-ext\",\n \"latin\",\n \"latin-ext\",\n \"vietnamese\"\n ]\n});\nvar fontFamily = \"Inter\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","import React from \"react\";\nimport { interpolate, spring, useCurrentFrame, useVideoConfig } from \"remotion\";\nimport { SPRING_CONFIGS } from \"../utils/springs\";\nimport { loadFont } from \"@remotion/google-fonts/Inter\";\n\nconst { fontFamily } = loadFont();\n\ntype FadeInTextProps = {\n children: React.ReactNode;\n delay?: number;\n fontSize?: number;\n fontWeight?: number;\n color?: string;\n slideDistance?: number;\n style?: 
React.CSSProperties;\n};\n\nexport const FadeInText: React.FC<FadeInTextProps> = ({\n children,\n delay = 0,\n fontSize = 48,\n fontWeight = 600,\n color = \"#FFFFFF\",\n slideDistance = 30,\n style,\n}) => {\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n\n const progress = spring({\n frame,\n fps,\n delay,\n config: SPRING_CONFIGS.smooth,\n });\n\n const opacity = interpolate(progress, [0, 1], [0, 1]);\n const translateY = interpolate(progress, [0, 1], [slideDistance, 0]);\n\n return (\n <div\n style={{\n fontFamily,\n fontSize,\n fontWeight,\n color,\n opacity,\n transform: `translateY(${translateY}px)`,\n ...style,\n }}\n >\n {children}\n </div>\n );\n};\n","import React from \"react\";\nimport { Img, staticFile } from \"remotion\";\n\ntype Platform = \"ios\" | \"android\" | \"web\";\n\ninterface DeviceFrameProps {\n platform: Platform;\n children: React.ReactNode;\n width?: number;\n height?: number;\n}\n\n// iPhone mockup proportions (matches mockup-ios-iphone-17-pro.png from tuka)\nconst IOS_ASPECT = 1043 / 2063;\n// Android mockup proportions (matches mockup-android-google-pixel-9-pro.png)\nconst ANDROID_ASPECT = 353 / 745;\nconst SCREEN_INSET_X = 0.027;\nconst SCREEN_INSET_Y = 0.014;\nconst SCREEN_RADIUS_RATIO = 0.13;\n\nconst IosFrame: React.FC<{ children: React.ReactNode; width: number; height: number }> = ({\n children,\n width,\n height,\n}) => {\n const screenInsetX = width * SCREEN_INSET_X;\n const screenInsetY = height * SCREEN_INSET_Y;\n const screenRadius = width * SCREEN_RADIUS_RATIO;\n const screenWidth = width - screenInsetX * 2;\n const screenHeight = height - screenInsetY * 2;\n\n return (\n <div style={{ position: \"relative\", width, height }}>\n {/* Screen content */}\n <div\n style={{\n position: \"absolute\",\n left: screenInsetX,\n top: screenInsetY,\n width: screenWidth,\n height: screenHeight,\n borderRadius: screenRadius,\n overflow: \"hidden\",\n backgroundColor: \"#000\",\n }}\n >\n {children}\n </div>\n\n 
{/* PNG mockup frame overlay */}\n <Img\n src={staticFile(\"mockup-ios-iphone-17-pro.png\")}\n style={{\n position: \"absolute\",\n top: 0,\n left: 0,\n width,\n height,\n zIndex: 10,\n pointerEvents: \"none\",\n }}\n />\n </div>\n );\n};\n\nconst AndroidFrame: React.FC<{ children: React.ReactNode; width: number; height: number }> = ({\n children,\n width,\n height,\n}) => {\n const screenInsetX = width * 0.03;\n const screenInsetY = height * 0.016;\n const screenRadius = width * 0.10;\n const screenWidth = width - screenInsetX * 2;\n const screenHeight = height - screenInsetY * 2;\n\n return (\n <div style={{ position: \"relative\", width, height }}>\n {/* Screen content */}\n <div\n style={{\n position: \"absolute\",\n left: screenInsetX,\n top: screenInsetY,\n width: screenWidth,\n height: screenHeight,\n borderRadius: screenRadius,\n overflow: \"hidden\",\n backgroundColor: \"#000\",\n }}\n >\n {children}\n </div>\n\n {/* PNG mockup frame overlay */}\n <Img\n src={staticFile(\"mockup-android.png\")}\n style={{\n position: \"absolute\",\n top: 0,\n left: 0,\n width,\n height,\n zIndex: 10,\n pointerEvents: \"none\",\n }}\n />\n </div>\n );\n};\n\nconst WebFrame: React.FC<{ children: React.ReactNode; width: number; height: number }> = ({\n children,\n width,\n height,\n}) => {\n const titleBarHeight = 36;\n\n return (\n <div\n style={{\n position: \"relative\",\n width,\n height,\n borderRadius: 12,\n overflow: \"hidden\",\n backgroundColor: \"#1E1E2E\",\n border: \"1px solid #313244\",\n boxShadow: \"0 12px 48px rgba(0,0,0,0.5)\",\n display: \"flex\",\n flexDirection: \"column\",\n }}\n >\n {/* Browser chrome */}\n <div\n style={{\n height: titleBarHeight,\n backgroundColor: \"#181825\",\n borderBottom: \"1px solid #313244\",\n display: \"flex\",\n alignItems: \"center\",\n padding: \"0 14px\",\n gap: 8,\n flexShrink: 0,\n }}\n >\n {/* Traffic lights */}\n <div style={{ display: \"flex\", gap: 6 }}>\n <div style={{ width: 10, height: 10, borderRadius: \"50%\", 
backgroundColor: \"#F38BA8\" }} />\n <div style={{ width: 10, height: 10, borderRadius: \"50%\", backgroundColor: \"#F9E2AF\" }} />\n <div style={{ width: 10, height: 10, borderRadius: \"50%\", backgroundColor: \"#A6E3A1\" }} />\n </div>\n {/* URL bar */}\n <div\n style={{\n flex: 1,\n height: 22,\n backgroundColor: \"#11111B\",\n borderRadius: 6,\n display: \"flex\",\n alignItems: \"center\",\n padding: \"0 10px\",\n fontSize: 11,\n color: \"#6C7086\",\n fontFamily: \"system-ui, -apple-system, sans-serif\",\n }}\n >\n localhost:3847\n </div>\n </div>\n\n {/* Content area */}\n <div style={{ flex: 1, overflow: \"hidden\" }}>\n {children}\n </div>\n </div>\n );\n};\n\nexport const DeviceFrame: React.FC<DeviceFrameProps> = ({\n platform,\n children,\n width,\n height,\n}) => {\n if (platform === \"ios\") {\n const h = height || 820;\n const w = width || h * IOS_ASPECT;\n return <IosFrame width={w} height={h}>{children}</IosFrame>;\n }\n\n if (platform === \"android\") {\n const h = height || 820;\n const w = width || h * ANDROID_ASPECT;\n return <AndroidFrame width={w} height={h}>{children}</AndroidFrame>;\n }\n\n // Web\n const w = width || 900;\n const h = height || 580;\n return <WebFrame width={w} height={h}>{children}</WebFrame>;\n};\n","import React from \"react\";\nimport {\n AbsoluteFill,\n interpolate,\n spring,\n useCurrentFrame,\n useVideoConfig,\n} from \"remotion\";\nimport { Video } from \"@remotion/media\";\nimport { TerminalWindowDark } from \"../shared/TerminalWindowDark\";\nimport { CodeTypewriter } from \"../shared/CodeTypewriter\";\nimport { FadeInText } from \"../shared/FadeInText\";\nimport { DeviceFrame } from \"../shared/DeviceFrame\";\nimport { SPRING_CONFIGS } from \"../utils/springs\";\nimport type { TemplateProps } from \"../Root\";\n\n/**\n * Studio Template (based on tuka's CreativeV4CodeTerminal)\n *\n * Layout:\n * WITH network data: Terminal (left ~42%) + Device mockup (right ~58%)\n * WITHOUT network data + title: Title centered 
above device\n * WITHOUT network data + no title: Device centered only\n * Timeline (10s = 300 frames):\n * 0-25: Device enters from right with bouncy spring\n * 25-60: Terminal appears from left with scale morph (if tabs)\n * 60+: CodeTypewriter animates terminal content\n */\nexport const Studio: React.FC<TemplateProps> = ({\n videoUrl,\n platform,\n title,\n terminalTabs,\n}) => {\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n const hasTabs = terminalTabs && terminalTabs.length > 0;\n const hasTitle = title && title.trim().length > 0;\n\n // --- Device entrance from right ---\n const deviceProgress = spring({\n frame,\n fps,\n delay: 0,\n config: SPRING_CONFIGS.bouncy,\n });\n const deviceScale = interpolate(deviceProgress, [0, 1], [0.8, 1]);\n const deviceOpacity = interpolate(deviceProgress, [0, 1], [0, 1]);\n const deviceSlideX = interpolate(deviceProgress, [0, 1], [150, 0]);\n\n // --- Terminal entrance with morph/scale ---\n const terminalProgress = spring({\n frame,\n fps,\n delay: 25,\n config: SPRING_CONFIGS.smooth,\n });\n const terminalOpacity = interpolate(terminalProgress, [0, 1], [0, 1]);\n const terminalScale = interpolate(terminalProgress, [0, 1], [0.7, 1]);\n const terminalSlideX = interpolate(terminalProgress, [0, 1], [-80, 0]);\n\n // --- Title text ---\n const titleProgress = spring({\n frame,\n fps,\n delay: 30,\n config: SPRING_CONFIGS.smooth,\n });\n const titleOpacity = interpolate(titleProgress, [0, 1], [0, 1]);\n\n // Tab cycling: ~3.3s per tab at 30fps\n const tabDurationFrames = 100;\n\n return (\n <AbsoluteFill\n style={{\n backgroundColor: \"#0F0F1A\",\n display: \"flex\",\n flexDirection: \"row\",\n alignItems: \"center\",\n justifyContent: \"center\",\n padding: \"60px 80px\",\n gap: 60,\n }}\n >\n {/* Left side: Terminal + text (only if we have tabs) */}\n {hasTabs && (\n <div\n style={{\n flex: 1,\n display: \"flex\",\n flexDirection: \"column\",\n gap: 24,\n opacity: terminalOpacity,\n transform: 
`translateX(${terminalSlideX}px) scale(${terminalScale})`,\n }}\n >\n {/* Title (only if not empty) */}\n {hasTitle && (\n <div style={{ opacity: titleOpacity }}>\n <FadeInText\n delay={35}\n fontSize={28}\n fontWeight={700}\n color=\"#FFFFFF\"\n style={{ lineHeight: 1.2 }}\n >\n {title}\n </FadeInText>\n </div>\n )}\n\n {/* Tabbed terminal */}\n <div style={{ height: hasTitle ? 500 : 560 }}>\n <TerminalWindowDark\n title=\"network-trace.json\"\n tabs={terminalTabs}\n tabDurationFrames={tabDurationFrames}\n renderTab={(activeIndex) => (\n <CodeTypewriter\n code={terminalTabs[activeIndex]?.content || \"\"}\n charFrames={2}\n delay={60}\n fontSize={14}\n />\n )}\n />\n </div>\n </div>\n )}\n\n {/* No tabs + title: Title text centered above device */}\n {!hasTabs && hasTitle && (\n <div\n style={{\n position: \"absolute\",\n top: 60,\n left: 0,\n right: 0,\n textAlign: \"center\",\n opacity: titleOpacity,\n }}\n >\n <FadeInText\n delay={35}\n fontSize={32}\n fontWeight={700}\n color=\"#FFFFFF\"\n style={{ lineHeight: 1.2 }}\n >\n {title}\n </FadeInText>\n </div>\n )}\n\n {/* Device mockup */}\n <div\n style={{\n flex: hasTabs ? 
\"0 0 auto\" : undefined,\n display: \"flex\",\n alignItems: \"center\",\n justifyContent: \"center\",\n opacity: deviceOpacity,\n transform: `scale(${deviceScale}) translateX(${deviceSlideX}px)`,\n }}\n >\n <DeviceFrame platform={platform}>\n <Video\n src={videoUrl}\n muted\n loop\n style={{\n width: \"100%\",\n height: \"100%\",\n objectFit: \"cover\",\n }}\n />\n </DeviceFrame>\n </div>\n </AbsoluteFill>\n );\n};\n","export const COLORS = {\n bg: \"#0F0F1A\",\n bgShowcase: \"#FFFFFF\",\n text: \"#FFFFFF\",\n textDark: \"#000000\",\n accent: \"#3B82F6\",\n accentLight: \"#60A5FA\",\n accentDark: \"#2563EB\",\n gray: \"#6B7280\",\n grayLight: \"#F3F4F6\",\n} as const;\n","import React from \"react\";\nimport {\n interpolate,\n spring,\n useCurrentFrame,\n useVideoConfig,\n Easing,\n} from \"remotion\";\nimport { COLORS } from \"../utils/colors\";\n\ntype CursorPoint = {\n x: number;\n y: number;\n atFrame: number;\n click?: boolean;\n};\n\ntype AnimatedCursorProps = {\n path: CursorPoint[];\n size?: number;\n};\n\nexport const AnimatedCursor: React.FC<AnimatedCursorProps> = ({\n path,\n size = 48,\n}) => {\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n\n if (path.length === 0) return null;\n\n let x = path[0].x;\n let y = path[0].y;\n let isClicking = false;\n let clickFrame = -999;\n\n for (let i = 0; i < path.length; i++) {\n const point = path[i];\n const nextPoint = path[i + 1];\n\n if (!nextPoint) {\n if (frame >= point.atFrame) {\n x = point.x;\n y = point.y;\n if (point.click && frame >= point.atFrame && frame < point.atFrame + 15) {\n isClicking = true;\n clickFrame = point.atFrame;\n }\n }\n break;\n }\n\n if (frame >= point.atFrame && frame < nextPoint.atFrame) {\n const segmentProgress = interpolate(\n frame,\n [point.atFrame, nextPoint.atFrame],\n [0, 1],\n {\n extrapolateLeft: \"clamp\",\n extrapolateRight: \"clamp\",\n easing: Easing.inOut(Easing.quad),\n }\n );\n\n x = interpolate(segmentProgress, [0, 1], [point.x, 
nextPoint.x]);\n y = interpolate(segmentProgress, [0, 1], [point.y, nextPoint.y]);\n\n if (point.click && frame >= point.atFrame && frame < point.atFrame + 15) {\n isClicking = true;\n clickFrame = point.atFrame;\n }\n break;\n }\n\n if (frame >= nextPoint.atFrame) {\n x = nextPoint.x;\n y = nextPoint.y;\n if (nextPoint.click && frame >= nextPoint.atFrame && frame < nextPoint.atFrame + 15) {\n isClicking = true;\n clickFrame = nextPoint.atFrame;\n }\n }\n }\n\n const entranceOpacity = interpolate(frame, [path[0].atFrame - 5, path[0].atFrame], [0, 1], {\n extrapolateLeft: \"clamp\",\n extrapolateRight: \"clamp\",\n });\n\n const rippleProgress = isClicking\n ? spring({\n frame,\n fps,\n delay: clickFrame,\n config: { damping: 15, stiffness: 150 },\n durationInFrames: 15,\n })\n : 0;\n\n const rippleScale = interpolate(rippleProgress, [0, 1], [0.3, 2.5]);\n const rippleOpacity = interpolate(rippleProgress, [0, 0.3, 1], [0, 0.6, 0]);\n\n const clickScale = isClicking\n ? interpolate(\n frame - clickFrame,\n [0, 3, 6, 10],\n [1, 0.8, 1.05, 1],\n { extrapolateRight: \"clamp\" }\n )\n : 1;\n\n // SVG cursor (no external image needed)\n return (\n <div\n style={{\n position: \"absolute\",\n inset: 0,\n pointerEvents: \"none\",\n zIndex: 100,\n }}\n >\n {isClicking && (\n <div\n style={{\n position: \"absolute\",\n left: x - 25,\n top: y - 25,\n width: 50,\n height: 50,\n borderRadius: \"50%\",\n border: `3px solid ${COLORS.accent}`,\n transform: `scale(${rippleScale})`,\n opacity: rippleOpacity,\n }}\n />\n )}\n\n <svg\n width={size}\n height={size}\n viewBox=\"0 0 24 24\"\n style={{\n position: \"absolute\",\n left: x,\n top: y,\n transform: `scale(${clickScale})`,\n transformOrigin: \"2px 2px\",\n opacity: entranceOpacity,\n filter: \"drop-shadow(2px 4px 6px rgba(0,0,0,0.4))\",\n }}\n >\n <path\n d=\"M5.5 3.21V20.8c0 .45.54.67.85.35l4.86-4.86a.5.5 0 0 1 .35-.15h6.87c.45 0 .67-.54.35-.85L5.85 2.36a.5.5 0 0 0-.35.85z\"\n fill=\"#FFFFFF\"\n stroke=\"#000000\"\n 
strokeWidth=\"1.5\"\n />\n </svg>\n </div>\n );\n};\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? [style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? 
Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? baseLabel : `${baseLabel}. This might be caused by loading too many font variations. Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). 
Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/Neonderthaw.ts\nvar getInfo = () => ({\n fontFamily: \"Neonderthaw\",\n importName: \"Neonderthaw\",\n version: \"v8\",\n url: \"https://fonts.googleapis.com/css2?family=Neonderthaw:ital,wght@0,400\",\n unicodeRanges: {\n vietnamese: \"U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB\",\n \"latin-ext\": \"U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF\",\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n normal: {\n \"400\": {\n vietnamese: \"https://fonts.gstatic.com/s/neonderthaw/v8/Iure6Yx5-oWVZI0r-17AeaZBrLRw4Q.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/neonderthaw/v8/Iure6Yx5-oWVZI0r-17AeaZArLRw4Q.woff2\",\n latin: \"https://fonts.gstatic.com/s/neonderthaw/v8/Iure6Yx5-oWVZI0r-17AeaZOrLQ.woff2\"\n }\n }\n },\n subsets: [\"latin\", \"latin-ext\", \"vietnamese\"]\n});\nvar fontFamily = \"Neonderthaw\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, 
reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? [style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? 
baseLabel : `${baseLabel}. This might be caused by loading too many font variations. Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). 
Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/BowlbyOneSC.ts\nvar getInfo = () => ({\n fontFamily: \"Bowlby One SC\",\n importName: \"BowlbyOneSC\",\n version: \"v27\",\n url: \"https://fonts.googleapis.com/css2?family=Bowlby+One+SC:ital,wght@0,400\",\n unicodeRanges: {\n \"latin-ext\": \"U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF\",\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n normal: {\n \"400\": {\n \"latin-ext\": \"https://fonts.gstatic.com/s/bowlbyonesc/v27/DtVlJxerQqQm37tzN3wMug9P_gH2ojhN.woff2\",\n latin: \"https://fonts.gstatic.com/s/bowlbyonesc/v27/DtVlJxerQqQm37tzN3wMug9P_g_2og.woff2\"\n }\n }\n },\n subsets: [\"latin\", \"latin-ext\"]\n});\nvar fontFamily = \"Bowlby One SC\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n 
clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? [style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? baseLabel : `${baseLabel}. This might be caused by loading too many font variations. 
Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). 
Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/BricolageGrotesque.ts\nvar getInfo = () => ({\n fontFamily: \"Bricolage Grotesque\",\n importName: \"BricolageGrotesque\",\n version: \"v9\",\n url: \"https://fonts.googleapis.com/css2?family=Bricolage+Grotesque:ital,wght@0,200;0,300;0,400;0,500;0,600;0,700;0,800\",\n unicodeRanges: {\n vietnamese: \"U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB\",\n \"latin-ext\": \"U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF\",\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n normal: {\n \"200\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n \"300\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: 
\"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n \"400\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n \"500\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n \"600\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n \"700\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n },\n 
\"800\": {\n vietnamese: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDFXplDs.woff2\",\n \"latin-ext\": \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawDVXplDs.woff2\",\n latin: \"https://fonts.gstatic.com/s/bricolagegrotesque/v9/3y9H6as8bTXq_nANBjzKo3IeZx8z6up5BeSl5jBNz_19PpbpMXuECpwUxJBOm_OJWiawA1Xp.woff2\"\n }\n }\n },\n subsets: [\"latin\", \"latin-ext\", \"vietnamese\"]\n});\nvar fontFamily = \"Bricolage Grotesque\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","// src/base.ts\nimport { continueRender, delayRender } from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nvar loadedFonts = {};\nvar withResolvers = function() {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve, reject };\n};\nvar loadFontFaceOrTimeoutAfter20Seconds = (fontFace) => {\n const timeout = withResolvers();\n const int = setTimeout(() => {\n timeout.reject(new Error(\"Timed out loading Google Font\"));\n }, 18000);\n return Promise.race([\n fontFace.load().then(() => {\n clearTimeout(int);\n }),\n timeout.promise\n ]);\n};\nvar loadFonts = (meta, style, options) => {\n const weightsAndSubsetsAreSpecified = Array.isArray(options?.weights) && Array.isArray(options?.subsets) && options.weights.length > 0 && options.subsets.length > 0;\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES && !weightsAndSubsetsAreSpecified) {\n throw new Error(\"Loading Google Fonts without specifying weights and subsets is not supported in Remotion v5. Please specify the weights and subsets you need.\");\n }\n const promises = [];\n const styles = style ? 
[style] : Object.keys(meta.fonts);\n let fontsLoaded = 0;\n for (const style2 of styles) {\n if (typeof FontFace === \"undefined\") {\n continue;\n }\n if (!meta.fonts[style2]) {\n throw new Error(`The font ${meta.fontFamily} does not have a style ${style2}`);\n }\n const weights = options?.weights ?? Object.keys(meta.fonts[style2]);\n for (const weight of weights) {\n if (!meta.fonts[style2][weight]) {\n throw new Error(`The font ${meta.fontFamily} does not have a weight ${weight} in style ${style2}`);\n }\n const subsets = options?.subsets ?? Object.keys(meta.fonts[style2][weight]);\n for (const subset of subsets) {\n let font = meta.fonts[style2]?.[weight]?.[subset];\n if (!font) {\n throw new Error(`weight: ${weight} subset: ${subset} is not available for '${meta.fontFamily}'`);\n }\n let fontKey = `${meta.fontFamily}-${style2}-${weight}-${subset}`;\n const previousPromise = loadedFonts[fontKey];\n if (previousPromise) {\n promises.push(previousPromise);\n continue;\n }\n const baseLabel = `Fetching ${meta.fontFamily} font ${JSON.stringify({\n style: style2,\n weight,\n subset\n })}`;\n const label = weightsAndSubsetsAreSpecified ? baseLabel : `${baseLabel}. This might be caused by loading too many font variations. Read more: https://www.remotion.dev/docs/troubleshooting/font-loading-errors#render-timeout-when-loading-google-fonts`;\n const handle = delayRender(label, { timeoutInMilliseconds: 60000 });\n fontsLoaded++;\n const fontFace = new FontFace(meta.fontFamily, `url(${font}) format('woff2')`, {\n weight,\n style: style2,\n unicodeRange: meta.unicodeRanges[subset]\n });\n let attempts = 2;\n const tryToLoad = () => {\n if (fontFace.status === \"loaded\") {\n continueRender(handle);\n return;\n }\n const promise = loadFontFaceOrTimeoutAfter20Seconds(fontFace).then(() => {\n (options?.document ?? 
document).fonts.add(fontFace);\n continueRender(handle);\n }).catch((err) => {\n loadedFonts[fontKey] = undefined;\n if (attempts === 0) {\n throw err;\n } else {\n attempts--;\n tryToLoad();\n }\n });\n loadedFonts[fontKey] = promise;\n promises.push(promise);\n };\n tryToLoad();\n }\n }\n if (fontsLoaded > 20) {\n console.warn(`Made ${fontsLoaded} network requests to load fonts for ${meta.fontFamily}. Consider loading fewer weights and subsets by passing options to loadFont(). Disable this warning by passing \"ignoreTooManyRequestsWarning: true\" to \"options\".`);\n }\n }\n return {\n fontFamily: meta.fontFamily,\n fonts: meta.fonts,\n unicodeRanges: meta.unicodeRanges,\n waitUntilDone: () => Promise.all(promises).then(() => {\n return;\n })\n };\n};\n\n// src/MiltonianTattoo.ts\nvar getInfo = () => ({\n fontFamily: \"Miltonian Tattoo\",\n importName: \"MiltonianTattoo\",\n version: \"v34\",\n url: \"https://fonts.googleapis.com/css2?family=Miltonian+Tattoo:ital,wght@0,400\",\n unicodeRanges: {\n latin: \"U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD\"\n },\n fonts: {\n normal: {\n \"400\": {\n latin: \"https://fonts.gstatic.com/s/miltoniantattoo/v34/EvOUzBRL0o0kCxF-lcMCQxlpVsA_JwT2MQ.woff2\"\n }\n }\n },\n subsets: [\"latin\"]\n});\nvar fontFamily = \"Miltonian Tattoo\";\nvar loadFont = (style, options) => {\n return loadFonts(getInfo(), style, options);\n};\nexport {\n loadFont,\n getInfo,\n fontFamily\n};\n","import React from \"react\";\nimport {\n AbsoluteFill,\n interpolate,\n spring,\n useCurrentFrame,\n useVideoConfig,\n Easing,\n} from \"remotion\";\nimport { Video } from \"@remotion/media\";\nimport { TerminalWindowDark } from \"../shared/TerminalWindowDark\";\nimport { CodeTypewriter } from \"../shared/CodeTypewriter\";\nimport { FadeInText } from \"../shared/FadeInText\";\nimport { DeviceFrame } from 
\"../shared/DeviceFrame\";\nimport { AnimatedCursor } from \"../shared/AnimatedCursor\";\nimport { SPRING_CONFIGS } from \"../utils/springs\";\nimport { COLORS } from \"../utils/colors\";\nimport { loadFont as loadNeonderthaw } from \"@remotion/google-fonts/Neonderthaw\";\nimport { loadFont as loadBowlbyOneSC } from \"@remotion/google-fonts/BowlbyOneSC\";\nimport { loadFont as loadBricolage } from \"@remotion/google-fonts/BricolageGrotesque\";\nimport { loadFont as loadMiltonianTattoo } from \"@remotion/google-fonts/MiltonianTattoo\";\nimport type { TemplateProps } from \"../Root\";\n\nconst { fontFamily: neonderthaw } = loadNeonderthaw();\nconst { fontFamily: bowlbyOneSC } = loadBowlbyOneSC();\nconst { fontFamily: bricolage } = loadBricolage();\nconst { fontFamily: miltonianTattoo } = loadMiltonianTattoo();\n\n// Font families for staggered title lines (alternating styles like V5)\nconst TITLE_FONTS = [neonderthaw, bowlbyOneSC, bricolage, miltonianTattoo];\nconst TITLE_SIZES = [120, 96, 88, 80];\n\n/**\n * Showcase Template (based on tuka's CreativeV5ArtisticFloat)\n *\n * Two modes:\n * 'artistic': Staggered fonts, no terminal — default when no tabs\n * 'terminal': Plain title (FadeInText) + tabbed terminal — default when tabs exist\n *\n * Layout: Title (top-left) + Terminal if terminal mode (bottom-left) + Floating device (right)\n * Timeline (12s = 360 frames):\n * 0-30: Device enters center with bouncy spring\n * 30-60: Device zoom in (1x → 1.3x)\n * 60-90: Cursor appears, moves to device, clicks\n * 90-120: Device bounces back (spring)\n * 120-160: Device slides to right side\n * 140+: Text appears (artistic staggered or plain FadeInText)\n * 160+: Terminal appears below text (if terminal mode), gentle floating on device\n */\nexport const Showcase: React.FC<TemplateProps> = ({\n videoUrl,\n platform,\n title,\n titleLines,\n subtitle,\n terminalTabs,\n showcaseMode,\n}) => {\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n const 
hasTabs = terminalTabs && terminalTabs.length > 0;\n\n // Determine effective mode\n const effectiveMode = showcaseMode || (hasTabs ? \"terminal\" : \"artistic\");\n\n // Device dimensions\n const isWeb = platform === \"web\";\n const phoneHeight = isWeb ? 640 : 820;\n const phoneWidth = isWeb ? 900 : phoneHeight * (1043 / 2063);\n\n // --- Device entrance ---\n const deviceEntrance = spring({\n frame,\n fps,\n delay: 0,\n config: SPRING_CONFIGS.bouncy,\n });\n const deviceEntranceScale = interpolate(deviceEntrance, [0, 1], [0.8, 1]);\n const deviceEntranceOpacity = interpolate(deviceEntrance, [0, 1], [0, 1]);\n\n // --- Zoom in (30-60) ---\n const zoomProgress = interpolate(frame, [30, 60], [0, 1], {\n extrapolateLeft: \"clamp\",\n extrapolateRight: \"clamp\",\n easing: Easing.inOut(Easing.cubic),\n });\n const zoomScale = interpolate(zoomProgress, [0, 1], [1, 1.3]);\n\n // --- Bounce back (90-120) ---\n const bounceProgress = spring({\n frame,\n fps,\n delay: 90,\n config: SPRING_CONFIGS.bouncy,\n });\n const bounceScale = frame >= 90\n ? interpolate(bounceProgress, [0, 1], [1.3, 1])\n : 1;\n\n // --- Slide to right (120-160) ---\n const slideProgress = interpolate(frame, [120, 160], [0, 1], {\n extrapolateLeft: \"clamp\",\n extrapolateRight: \"clamp\",\n easing: Easing.inOut(Easing.cubic),\n });\n\n const deviceCenterX = 960 - phoneWidth / 2;\n const deviceRightX = 1920 - phoneWidth - 120;\n const deviceX = interpolate(slideProgress, [0, 1], [deviceCenterX, deviceRightX]);\n\n // Device final scale\n let deviceFinalScale = deviceEntranceScale;\n if (frame >= 30 && frame < 90) {\n deviceFinalScale = zoomScale;\n } else if (frame >= 90) {\n deviceFinalScale = bounceScale;\n }\n\n // Gentle floating after settled\n const floatY = frame > 160 ? 
Math.sin((frame - 160) * 0.04) * 4 : 0;\n\n // --- Text appear frame ---\n const textAppearFrame = 140;\n const showText = frame >= textAppearFrame;\n\n // --- Terminal entrance ---\n const terminalAppearFrame = 170;\n const terminalProgress = spring({\n frame,\n fps,\n delay: terminalAppearFrame,\n config: SPRING_CONFIGS.smooth,\n });\n const terminalOpacity = interpolate(terminalProgress, [0, 1], [0, 1]);\n const terminalSlideY = interpolate(terminalProgress, [0, 1], [30, 0]);\n\n // Cursor visible during zoom phase\n const showCursor = frame >= 55 && frame < 95;\n\n // Dynamic tab duration based on content length\n const calculateTabDuration = () => {\n if (!hasTabs) return 100;\n const minDuration = 90; // 3s minimum\n const charFrames = 2;\n const maxContentLength = Math.max(...terminalTabs.map((t) => t.content.length));\n const framesNeeded = maxContentLength * charFrames + 30; // +30 for fade transitions\n // Cap so all tabs fit in composition (360 frames - terminalAppearFrame - 20 delay)\n const availableFrames = 360 - terminalAppearFrame - 20;\n const maxPerTab = Math.floor(availableFrames / terminalTabs.length);\n return Math.min(Math.max(minDuration, framesNeeded), maxPerTab);\n };\n const tabDurationFrames = calculateTabDuration();\n\n // Dynamic terminal height based on content\n const calculateTerminalHeight = () => {\n if (!hasTabs) return 280;\n const codeFontSize = 13;\n const lineHeight = codeFontSize * 1.7; // matches CodeTypewriter lineHeight\n const maxLines = Math.max(...terminalTabs.map((t) => t.content.split(\"\\n\").length));\n const contentPadding = 24 * 2; // top + bottom padding in TerminalWindowDark\n const titleBarHeight = 40;\n const tabBarHeight = terminalTabs.length > 1 ? 
30 : 0;\n const computed = titleBarHeight + tabBarHeight + contentPadding + maxLines * lineHeight;\n // Clamp: min 200, max ~600 to leave room for title + padding\n return Math.round(Math.min(Math.max(computed, 200), 600));\n };\n const terminalHeight = calculateTerminalHeight();\n\n // Platform label\n const platformLabel =\n platform === \"ios\"\n ? \"iOS App\"\n : platform === \"android\"\n ? \"Android App\"\n : \"Web App\";\n\n // --- Title text config (shared between modes) ---\n const titleProgress = spring({\n frame,\n fps,\n delay: textAppearFrame,\n config: SPRING_CONFIGS.smooth,\n });\n const titleOpacity = interpolate(titleProgress, [0, 1], [0, 1]);\n\n // Build display lines for artistic mode\n const lineDelay = 8;\n const displayLines: string[] = titleLines && titleLines.length > 0\n ? titleLines\n : (() => {\n const words = title.split(\" \").slice(0, 8);\n const lines: string[] = [];\n for (let i = 0; i < words.length; i += 2) {\n lines.push(words.slice(i, i + 2).join(\" \"));\n }\n return lines;\n })();\n\n const makeLineProgress = (index: number) =>\n spring({\n frame,\n fps,\n delay: textAppearFrame + index * lineDelay,\n config: { damping: 18, stiffness: 160, mass: 0.6 },\n });\n\n return (\n <AbsoluteFill style={{ backgroundColor: \"#FFFFFF\" }}>\n {/* Left side content */}\n {showText && (\n <div\n style={{\n position: \"absolute\",\n top: 0,\n left: 0,\n width: deviceRightX - 40,\n height: \"100%\",\n display: \"flex\",\n flexDirection: \"column\",\n alignItems: \"flex-start\",\n justifyContent: \"flex-start\",\n gap: 6,\n paddingLeft: 100,\n paddingTop: 100,\n }}\n >\n {effectiveMode === \"artistic\" ? 
(\n /* Artistic mode: staggered lines with different fonts */\n <div\n style={{\n display: \"flex\",\n flexDirection: \"column\",\n gap: 4,\n maxWidth: 600,\n }}\n >\n {displayLines.map((line, idx) => {\n const progress = makeLineProgress(idx);\n const opacity = interpolate(progress, [0, 1], [0, 1]);\n const translateY = interpolate(progress, [0, 1], [40, 0]);\n\n const fontIdx = idx % TITLE_FONTS.length;\n const isStroke = idx === 0;\n\n return (\n <span\n key={idx}\n style={{\n fontFamily: TITLE_FONTS[fontIdx],\n fontSize: TITLE_SIZES[fontIdx],\n fontWeight:\n fontIdx === 0 || fontIdx === 3\n ? 400\n : fontIdx === 2\n ? 600\n : 400,\n color: isStroke ? \"transparent\" : \"#000000\",\n WebkitTextStroke: isStroke ? \"2.5px #000000\" : undefined,\n opacity,\n transform: `translateY(${translateY}px)`,\n display: \"block\",\n lineHeight: 1.1,\n }}\n >\n {line}\n </span>\n );\n })}\n </div>\n ) : (\n /* Terminal mode: plain title like Studio */\n <div style={{ opacity: titleOpacity, maxWidth: 580 }}>\n <FadeInText\n delay={textAppearFrame}\n fontSize={14}\n fontWeight={700}\n color={COLORS.accent}\n style={{ textTransform: \"uppercase\", letterSpacing: 3 }}\n >\n {platformLabel}\n </FadeInText>\n <FadeInText\n delay={textAppearFrame + 5}\n fontSize={38}\n fontWeight={800}\n color=\"#000000\"\n style={{ marginTop: 8, lineHeight: 1.2 }}\n >\n {title}\n </FadeInText>\n {subtitle && (\n <FadeInText\n delay={textAppearFrame + 10}\n fontSize={16}\n fontWeight={400}\n color=\"#6B7280\"\n style={{ marginTop: 6 }}\n >\n {subtitle}\n </FadeInText>\n )}\n </div>\n )}\n\n {/* Terminal below title (only in terminal mode with tabs) */}\n {effectiveMode === \"terminal\" && hasTabs && (\n <div\n style={{\n marginTop: 30,\n width: \"100%\",\n maxWidth: 780,\n height: terminalHeight,\n opacity: terminalOpacity,\n transform: `translateY(${terminalSlideY}px)`,\n }}\n >\n <TerminalWindowDark\n title=\"network-trace.json\"\n tabs={terminalTabs}\n tabDurationFrames={tabDurationFrames}\n 
renderTab={(activeIndex) => (\n <CodeTypewriter\n code={terminalTabs[activeIndex]?.content || \"\"}\n charFrames={2}\n delay={terminalAppearFrame + 20}\n fontSize={13}\n />\n )}\n />\n </div>\n )}\n </div>\n )}\n\n {/* Device */}\n <div\n style={{\n position: \"absolute\",\n left: deviceX,\n top: 540 - phoneHeight / 2 + floatY,\n width: phoneWidth,\n height: phoneHeight,\n opacity: deviceEntranceOpacity,\n transform: `scale(${deviceFinalScale})`,\n transformOrigin: \"center center\",\n }}\n >\n <DeviceFrame platform={platform} width={phoneWidth} height={phoneHeight}>\n <Video\n src={videoUrl}\n muted\n loop\n style={{\n width: \"100%\",\n height: \"100%\",\n objectFit: \"cover\",\n }}\n />\n </DeviceFrame>\n </div>\n\n {/* Animated cursor */}\n {showCursor && (\n <AnimatedCursor\n size={48}\n path={[\n { x: 1400, y: 200, atFrame: 55 },\n { x: 960, y: 540, atFrame: 75, click: true },\n { x: 980, y: 560, atFrame: 90 },\n ]}\n />\n )}\n </AbsoluteFill>\n );\n};\n","import React from \"react\";\nimport { Composition, Folder } from \"remotion\";\nimport { Studio } from \"./Studio\";\nimport { Showcase } from \"./Showcase\";\n\nexport interface TerminalTab {\n label: string;\n method: string;\n route: string;\n content: string;\n}\n\nexport interface TemplateProps {\n videoUrl: string;\n videoDuration: number;\n platform: \"ios\" | \"android\" | \"web\";\n title: string;\n titleLines?: string[];\n subtitle?: string;\n terminalTabs: TerminalTab[];\n hasNetworkData: boolean;\n /** Showcase display mode: 'artistic' = staggered fonts, no terminal; 'terminal' = plain title + terminal */\n showcaseMode?: \"artistic\" | \"terminal\";\n}\n\nconst FPS = 30;\n\nconst sampleTabs: TerminalTab[] = [\n {\n label: \"GET /api/users\",\n method: \"GET\",\n route: \"/api/users\",\n content: JSON.stringify(\n [\n { id: 1, name: \"Alice Chen\", email: \"alice@example.com\", role: \"admin\" },\n { id: 2, name: \"Bob Park\", email: \"bob@example.com\", role: \"user\" },\n ],\n null,\n 2\n 
),\n },\n {\n label: \"POST /api/auth/login\",\n method: \"POST\",\n route: \"/api/auth/login\",\n content: JSON.stringify(\n { token: \"eyJhbGciOiJIUzI1NiIs...\", expiresIn: 3600, user: { id: 1, name: \"Alice\" } },\n null,\n 2\n ),\n },\n {\n label: \"GET /api/products\",\n method: \"GET\",\n route: \"/api/products\",\n content: JSON.stringify(\n [\n { id: \"p1\", name: \"Pro Plan\", price: 29.99, currency: \"USD\" },\n { id: \"p2\", name: \"Team Plan\", price: 79.99, currency: \"USD\" },\n ],\n null,\n 2\n ),\n },\n {\n label: \"PUT /api/settings\",\n method: \"PUT\",\n route: \"/api/settings\",\n content: JSON.stringify(\n { theme: \"dark\", notifications: true, language: \"en\", timezone: \"UTC-3\" },\n null,\n 2\n ),\n },\n {\n label: \"GET /api/analytics\",\n method: \"GET\",\n route: \"/api/analytics\",\n content: JSON.stringify(\n { dailyUsers: 1243, retention: \"87%\", avgSession: \"4m 32s\" },\n null,\n 2\n ),\n },\n];\n\nconst networkProps: TemplateProps = {\n videoUrl:\n \"https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4\",\n videoDuration: 10,\n platform: \"ios\",\n title: \"Network requests captured during recording session\",\n subtitle: \"AppLab Discovery\",\n terminalTabs: sampleTabs,\n hasNetworkData: true,\n};\n\nconst noNetworkProps: TemplateProps = {\n ...networkProps,\n title: \"Present your ideas with beautiful videos\",\n titleLines: [\"Present your\", \"ideas with\", \"beautiful\", \"videos\"],\n terminalTabs: [],\n hasNetworkData: false,\n};\n\nexport const Root: React.FC = () => {\n return (\n <>\n <Folder name=\"Templates\">\n <Composition\n id=\"studio\"\n component={Studio}\n durationInFrames={300}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={networkProps}\n />\n <Composition\n id=\"showcase\"\n component={Showcase}\n durationInFrames={360}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={{ ...noNetworkProps, showcaseMode: \"artistic\" }}\n />\n </Folder>\n\n {/* Variants for 
preview */}\n <Folder name=\"Variants\">\n <Composition\n id=\"studio-no-network\"\n component={Studio}\n durationInFrames={300}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={noNetworkProps}\n />\n <Composition\n id=\"studio-android\"\n component={Studio}\n durationInFrames={300}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={{ ...networkProps, platform: \"android\" }}\n />\n <Composition\n id=\"studio-web\"\n component={Studio}\n durationInFrames={300}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={{ ...networkProps, platform: \"web\" }}\n />\n <Composition\n id=\"showcase-terminal\"\n component={Showcase}\n durationInFrames={360}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={{ ...networkProps, showcaseMode: \"terminal\" }}\n />\n <Composition\n id=\"showcase-network\"\n component={Showcase}\n durationInFrames={360}\n fps={FPS}\n width={1920}\n height={1080}\n defaultProps={networkProps}\n />\n </Folder>\n </>\n );\n};\n","import { registerRoot } from \"remotion\";\nimport { Root } from \"./Root\";\n\nregisterRoot(Root);\n","\"use strict\";\n// https://github.com/remotion-dev/remotion/issues/3412#issuecomment-1910120552\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.injectCSS = void 0;\nfunction getEnvVar() {\n const parts = ['proc', 'ess', '.', 'en', 'v', '.', 'NOD', 'E_EN', 'V'];\n return parts.join('');\n}\nconst getEnvVariables = () => {\n if (window.remotion_isStudio) {\n // For the Studio, we already set the environment variables in index-html.ts.\n // We just add NODE_ENV here.\n if (!process.env.NODE_ENV) {\n throw new Error(`${getEnvVar()} is not set`);\n }\n return {\n NODE_ENV: process.env.NODE_ENV,\n };\n }\n const param = window.remotion_envVariables;\n if (!param) {\n return {};\n }\n return { ...JSON.parse(param), NODE_ENV: process.env.NODE_ENV };\n};\nconst setupEnvVariables = () => {\n const env = getEnvVariables();\n if (!window.process) {\n window.process = {};\n }\n 
if (!window.process.env) {\n window.process.env = {};\n }\n Object.keys(env).forEach((key) => {\n window.process.env[key] = env[key];\n });\n};\nsetupEnvVariables();\nconst injected = {};\nconst injectCSS = (css) => {\n // Skip in node\n if (typeof document === 'undefined') {\n return;\n }\n if (injected[css]) {\n return;\n }\n const head = document.head || document.getElementsByTagName('head')[0];\n const style = document.createElement('style');\n style.appendChild(document.createTextNode(css));\n head.prepend(style);\n injected[css] = true;\n};\nexports.injectCSS = injectCSS;\n(0, exports.injectCSS)(`\n .css-reset, .css-reset * {\n font-size: 16px;\n line-height: 1.5;\n color: white;\n font-family: Arial, Helvetica, sans-serif;\n background: transparent;\n box-sizing: border-box;\n }\n\n .algolia-docsearch-suggestion--highlight {\n font-size: 15px;\n line-height: 1.25;\n }\n\n .__remotion-info-button-container code {\n font-family: monospace;\n font-size: 14px;\n color: #0584f2\n }\n\n .__remotion-vertical-scrollbar::-webkit-scrollbar {\n width: 6px;\n }\n .__remotion-vertical-scrollbar::-webkit-scrollbar-thumb {\n background-color: rgba(0, 0, 0, 0.0);\n }\n .__remotion-vertical-scrollbar:hover::-webkit-scrollbar-thumb {\n background-color: rgba(0, 0, 0, 0.6);\n }\n .__remotion-vertical-scrollbar:hover::-webkit-scrollbar-thumb:hover {\n background-color: rgba(0, 0, 0, 1);\n }\n\n\n .__remotion-horizontal-scrollbar::-webkit-scrollbar {\n height: 6px;\n }\n .__remotion-horizontal-scrollbar::-webkit-scrollbar-thumb {\n background-color: rgba(0, 0, 0, 0.0);\n }\n .__remotion-horizontal-scrollbar:hover::-webkit-scrollbar-thumb {\n background-color: rgba(0, 0, 0, 0.6);\n }\n .__remotion-horizontal-scrollbar:hover::-webkit-scrollbar-thumb:hover {\n background-color: rgba(0, 0, 0, 1);\n }\n\n\n @-moz-document url-prefix() {\n .__remotion-vertical-scrollbar {\n scrollbar-width: thin;\n scrollbar-color: rgba(0, 0, 0, 0.6) rgba(0, 0, 0, 0);\n }\n\n 
.__remotion-vertical-scrollbar:hover {\n scrollbar-color: rgba(0, 0, 0, 1) rgba(0, 0, 0, 0);\n }\n\n .__remotion-horizontal-scrollbar {\n scrollbar-width: thin;\n scrollbar-color: rgba(0, 0, 0, 0.6) rgba(0, 0, 0, 0);\n }\n\n .__remotion-horizontal-scrollbar:hover {\n scrollbar-width: thin;\n scrollbar-color: rgba(0, 0, 0, 1) rgba(0, 0, 0, 0);\n }\n }\n\n\n .__remotion-timeline-slider {\n appearance: none;\n width: 100px;\n border-radius: 3px;\n height: 6px;\n background-color: rgba(255, 255, 255, 0.1);\n accent-color: #ffffff;\n }\n \n .__remotion-timeline-slider::-moz-range-thumb {\n width: 14px;\n height: 14px;\n border-radius: 50%;\n background-color: #ffffff;\n appearance: none;\n }\n`);\n","import * as React from 'react';\n\nif (typeof globalThis === 'undefined') {\n\twindow.React = React;\n} else {\n\tglobalThis.React = React;\n}\n","/**\n * @license React\n * react-dom.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n/*\n Modernizr 3.0.0pre (Custom Build) | MIT\n*/\n'use strict';var aa=require(\"react\"),ca=require(\"scheduler\");function p(a){for(var b=\"https://reactjs.org/docs/error-decoder.html?invariant=\"+a,c=1;c<arguments.length;c++)b+=\"&args[]=\"+encodeURIComponent(arguments[c]);return\"Minified React error #\"+a+\"; visit \"+b+\" for the full message or use the non-minified dev environment for full errors and additional helpful warnings.\"}var da=new Set,ea={};function fa(a,b){ha(a,b);ha(a+\"Capture\",b)}\nfunction ha(a,b){ea[a]=b;for(a=0;a<b.length;a++)da.add(b[a])}\nvar ia=!(\"undefined\"===typeof window||\"undefined\"===typeof window.document||\"undefined\"===typeof 
window.document.createElement),ja=Object.prototype.hasOwnProperty,ka=/^[:A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD][:A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$/,la=\n{},ma={};function oa(a){if(ja.call(ma,a))return!0;if(ja.call(la,a))return!1;if(ka.test(a))return ma[a]=!0;la[a]=!0;return!1}function pa(a,b,c,d){if(null!==c&&0===c.type)return!1;switch(typeof b){case \"function\":case \"symbol\":return!0;case \"boolean\":if(d)return!1;if(null!==c)return!c.acceptsBooleans;a=a.toLowerCase().slice(0,5);return\"data-\"!==a&&\"aria-\"!==a;default:return!1}}\nfunction qa(a,b,c,d){if(null===b||\"undefined\"===typeof b||pa(a,b,c,d))return!0;if(d)return!1;if(null!==c)switch(c.type){case 3:return!b;case 4:return!1===b;case 5:return isNaN(b);case 6:return isNaN(b)||1>b}return!1}function v(a,b,c,d,e,f,g){this.acceptsBooleans=2===b||3===b||4===b;this.attributeName=d;this.attributeNamespace=e;this.mustUseProperty=c;this.propertyName=a;this.type=b;this.sanitizeURL=f;this.removeEmptyString=g}var z={};\n\"children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style\".split(\" \").forEach(function(a){z[a]=new v(a,0,!1,a,null,!1,!1)});[[\"acceptCharset\",\"accept-charset\"],[\"className\",\"class\"],[\"htmlFor\",\"for\"],[\"httpEquiv\",\"http-equiv\"]].forEach(function(a){var b=a[0];z[b]=new v(b,1,!1,a[1],null,!1,!1)});[\"contentEditable\",\"draggable\",\"spellCheck\",\"value\"].forEach(function(a){z[a]=new v(a,2,!1,a.toLowerCase(),null,!1,!1)});\n[\"autoReverse\",\"externalResourcesRequired\",\"focusable\",\"preserveAlpha\"].forEach(function(a){z[a]=new v(a,2,!1,a,null,!1,!1)});\"allowFullScreen async autoFocus 
autoPlay controls default defer disabled disablePictureInPicture disableRemotePlayback formNoValidate hidden loop noModule noValidate open playsInline readOnly required reversed scoped seamless itemScope\".split(\" \").forEach(function(a){z[a]=new v(a,3,!1,a.toLowerCase(),null,!1,!1)});\n[\"checked\",\"multiple\",\"muted\",\"selected\"].forEach(function(a){z[a]=new v(a,3,!0,a,null,!1,!1)});[\"capture\",\"download\"].forEach(function(a){z[a]=new v(a,4,!1,a,null,!1,!1)});[\"cols\",\"rows\",\"size\",\"span\"].forEach(function(a){z[a]=new v(a,6,!1,a,null,!1,!1)});[\"rowSpan\",\"start\"].forEach(function(a){z[a]=new v(a,5,!1,a.toLowerCase(),null,!1,!1)});var ra=/[\\-:]([a-z])/g;function sa(a){return a[1].toUpperCase()}\n\"accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height\".split(\" \").forEach(function(a){var b=a.replace(ra,\nsa);z[b]=new v(b,1,!1,a,null,!1,!1)});\"xlink:actuate xlink:arcrole xlink:role xlink:show xlink:title xlink:type\".split(\" 
\").forEach(function(a){var b=a.replace(ra,sa);z[b]=new v(b,1,!1,a,\"http://www.w3.org/1999/xlink\",!1,!1)});[\"xml:base\",\"xml:lang\",\"xml:space\"].forEach(function(a){var b=a.replace(ra,sa);z[b]=new v(b,1,!1,a,\"http://www.w3.org/XML/1998/namespace\",!1,!1)});[\"tabIndex\",\"crossOrigin\"].forEach(function(a){z[a]=new v(a,1,!1,a.toLowerCase(),null,!1,!1)});\nz.xlinkHref=new v(\"xlinkHref\",1,!1,\"xlink:href\",\"http://www.w3.org/1999/xlink\",!0,!1);[\"src\",\"href\",\"action\",\"formAction\"].forEach(function(a){z[a]=new v(a,1,!1,a.toLowerCase(),null,!0,!0)});\nfunction ta(a,b,c,d){var e=z.hasOwnProperty(b)?z[b]:null;if(null!==e?0!==e.type:d||!(2<b.length)||\"o\"!==b[0]&&\"O\"!==b[0]||\"n\"!==b[1]&&\"N\"!==b[1])qa(b,c,e,d)&&(c=null),d||null===e?oa(b)&&(null===c?a.removeAttribute(b):a.setAttribute(b,\"\"+c)):e.mustUseProperty?a[e.propertyName]=null===c?3===e.type?!1:\"\":c:(b=e.attributeName,d=e.attributeNamespace,null===c?a.removeAttribute(b):(e=e.type,c=3===e||4===e&&!0===c?\"\":\"\"+c,d?a.setAttributeNS(d,b,c):a.setAttribute(b,c)))}\nvar ua=aa.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,va=Symbol.for(\"react.element\"),wa=Symbol.for(\"react.portal\"),ya=Symbol.for(\"react.fragment\"),za=Symbol.for(\"react.strict_mode\"),Aa=Symbol.for(\"react.profiler\"),Ba=Symbol.for(\"react.provider\"),Ca=Symbol.for(\"react.context\"),Da=Symbol.for(\"react.forward_ref\"),Ea=Symbol.for(\"react.suspense\"),Fa=Symbol.for(\"react.suspense_list\"),Ga=Symbol.for(\"react.memo\"),Ha=Symbol.for(\"react.lazy\");Symbol.for(\"react.scope\");Symbol.for(\"react.debug_trace_mode\");\nvar Ia=Symbol.for(\"react.offscreen\");Symbol.for(\"react.legacy_hidden\");Symbol.for(\"react.cache\");Symbol.for(\"react.tracing_marker\");var Ja=Symbol.iterator;function Ka(a){if(null===a||\"object\"!==typeof a)return null;a=Ja&&a[Ja]||a[\"@@iterator\"];return\"function\"===typeof a?a:null}var A=Object.assign,La;function Ma(a){if(void 0===La)try{throw Error();}catch(c){var b=c.stack.trim().match(/\\n( 
*(at )?)/);La=b&&b[1]||\"\"}return\"\\n\"+La+a}var Na=!1;\nfunction Oa(a,b){if(!a||Na)return\"\";Na=!0;var c=Error.prepareStackTrace;Error.prepareStackTrace=void 0;try{if(b)if(b=function(){throw Error();},Object.defineProperty(b.prototype,\"props\",{set:function(){throw Error();}}),\"object\"===typeof Reflect&&Reflect.construct){try{Reflect.construct(b,[])}catch(l){var d=l}Reflect.construct(a,[],b)}else{try{b.call()}catch(l){d=l}a.call(b.prototype)}else{try{throw Error();}catch(l){d=l}a()}}catch(l){if(l&&d&&\"string\"===typeof l.stack){for(var e=l.stack.split(\"\\n\"),\nf=d.stack.split(\"\\n\"),g=e.length-1,h=f.length-1;1<=g&&0<=h&&e[g]!==f[h];)h--;for(;1<=g&&0<=h;g--,h--)if(e[g]!==f[h]){if(1!==g||1!==h){do if(g--,h--,0>h||e[g]!==f[h]){var k=\"\\n\"+e[g].replace(\" at new \",\" at \");a.displayName&&k.includes(\"<anonymous>\")&&(k=k.replace(\"<anonymous>\",a.displayName));return k}while(1<=g&&0<=h)}break}}}finally{Na=!1,Error.prepareStackTrace=c}return(a=a?a.displayName||a.name:\"\")?Ma(a):\"\"}\nfunction Pa(a){switch(a.tag){case 5:return Ma(a.type);case 16:return Ma(\"Lazy\");case 13:return Ma(\"Suspense\");case 19:return Ma(\"SuspenseList\");case 0:case 2:case 15:return a=Oa(a.type,!1),a;case 11:return a=Oa(a.type.render,!1),a;case 1:return a=Oa(a.type,!0),a;default:return\"\"}}\nfunction Qa(a){if(null==a)return null;if(\"function\"===typeof a)return a.displayName||a.name||null;if(\"string\"===typeof a)return a;switch(a){case ya:return\"Fragment\";case wa:return\"Portal\";case Aa:return\"Profiler\";case za:return\"StrictMode\";case Ea:return\"Suspense\";case Fa:return\"SuspenseList\"}if(\"object\"===typeof a)switch(a.$$typeof){case Ca:return(a.displayName||\"Context\")+\".Consumer\";case Ba:return(a._context.displayName||\"Context\")+\".Provider\";case Da:var b=a.render;a=a.displayName;a||(a=b.displayName||\nb.name||\"\",a=\"\"!==a?\"ForwardRef(\"+a+\")\":\"ForwardRef\");return a;case Ga:return b=a.displayName||null,null!==b?b:Qa(a.type)||\"Memo\";case 
Ha:b=a._payload;a=a._init;try{return Qa(a(b))}catch(c){}}return null}\nfunction Ra(a){var b=a.type;switch(a.tag){case 24:return\"Cache\";case 9:return(b.displayName||\"Context\")+\".Consumer\";case 10:return(b._context.displayName||\"Context\")+\".Provider\";case 18:return\"DehydratedFragment\";case 11:return a=b.render,a=a.displayName||a.name||\"\",b.displayName||(\"\"!==a?\"ForwardRef(\"+a+\")\":\"ForwardRef\");case 7:return\"Fragment\";case 5:return b;case 4:return\"Portal\";case 3:return\"Root\";case 6:return\"Text\";case 16:return Qa(b);case 8:return b===za?\"StrictMode\":\"Mode\";case 22:return\"Offscreen\";\ncase 12:return\"Profiler\";case 21:return\"Scope\";case 13:return\"Suspense\";case 19:return\"SuspenseList\";case 25:return\"TracingMarker\";case 1:case 0:case 17:case 2:case 14:case 15:if(\"function\"===typeof b)return b.displayName||b.name||null;if(\"string\"===typeof b)return b}return null}function Sa(a){switch(typeof a){case \"boolean\":case \"number\":case \"string\":case \"undefined\":return a;case \"object\":return a;default:return\"\"}}\nfunction Ta(a){var b=a.type;return(a=a.nodeName)&&\"input\"===a.toLowerCase()&&(\"checkbox\"===b||\"radio\"===b)}\nfunction Ua(a){var b=Ta(a)?\"checked\":\"value\",c=Object.getOwnPropertyDescriptor(a.constructor.prototype,b),d=\"\"+a[b];if(!a.hasOwnProperty(b)&&\"undefined\"!==typeof c&&\"function\"===typeof c.get&&\"function\"===typeof c.set){var e=c.get,f=c.set;Object.defineProperty(a,b,{configurable:!0,get:function(){return e.call(this)},set:function(a){d=\"\"+a;f.call(this,a)}});Object.defineProperty(a,b,{enumerable:c.enumerable});return{getValue:function(){return d},setValue:function(a){d=\"\"+a},stopTracking:function(){a._valueTracker=\nnull;delete a[b]}}}}function Va(a){a._valueTracker||(a._valueTracker=Ua(a))}function Wa(a){if(!a)return!1;var b=a._valueTracker;if(!b)return!0;var c=b.getValue();var d=\"\";a&&(d=Ta(a)?a.checked?\"true\":\"false\":a.value);a=d;return a!==c?(b.setValue(a),!0):!1}function 
Xa(a){a=a||(\"undefined\"!==typeof document?document:void 0);if(\"undefined\"===typeof a)return null;try{return a.activeElement||a.body}catch(b){return a.body}}\nfunction Ya(a,b){var c=b.checked;return A({},b,{defaultChecked:void 0,defaultValue:void 0,value:void 0,checked:null!=c?c:a._wrapperState.initialChecked})}function Za(a,b){var c=null==b.defaultValue?\"\":b.defaultValue,d=null!=b.checked?b.checked:b.defaultChecked;c=Sa(null!=b.value?b.value:c);a._wrapperState={initialChecked:d,initialValue:c,controlled:\"checkbox\"===b.type||\"radio\"===b.type?null!=b.checked:null!=b.value}}function ab(a,b){b=b.checked;null!=b&&ta(a,\"checked\",b,!1)}\nfunction bb(a,b){ab(a,b);var c=Sa(b.value),d=b.type;if(null!=c)if(\"number\"===d){if(0===c&&\"\"===a.value||a.value!=c)a.value=\"\"+c}else a.value!==\"\"+c&&(a.value=\"\"+c);else if(\"submit\"===d||\"reset\"===d){a.removeAttribute(\"value\");return}b.hasOwnProperty(\"value\")?cb(a,b.type,c):b.hasOwnProperty(\"defaultValue\")&&cb(a,b.type,Sa(b.defaultValue));null==b.checked&&null!=b.defaultChecked&&(a.defaultChecked=!!b.defaultChecked)}\nfunction db(a,b,c){if(b.hasOwnProperty(\"value\")||b.hasOwnProperty(\"defaultValue\")){var d=b.type;if(!(\"submit\"!==d&&\"reset\"!==d||void 0!==b.value&&null!==b.value))return;b=\"\"+a._wrapperState.initialValue;c||b===a.value||(a.value=b);a.defaultValue=b}c=a.name;\"\"!==c&&(a.name=\"\");a.defaultChecked=!!a._wrapperState.initialChecked;\"\"!==c&&(a.name=c)}\nfunction cb(a,b,c){if(\"number\"!==b||Xa(a.ownerDocument)!==a)null==c?a.defaultValue=\"\"+a._wrapperState.initialValue:a.defaultValue!==\"\"+c&&(a.defaultValue=\"\"+c)}var eb=Array.isArray;\nfunction fb(a,b,c,d){a=a.options;if(b){b={};for(var 
e=0;e<c.length;e++)b[\"$\"+c[e]]=!0;for(c=0;c<a.length;c++)e=b.hasOwnProperty(\"$\"+a[c].value),a[c].selected!==e&&(a[c].selected=e),e&&d&&(a[c].defaultSelected=!0)}else{c=\"\"+Sa(c);b=null;for(e=0;e<a.length;e++){if(a[e].value===c){a[e].selected=!0;d&&(a[e].defaultSelected=!0);return}null!==b||a[e].disabled||(b=a[e])}null!==b&&(b.selected=!0)}}\nfunction gb(a,b){if(null!=b.dangerouslySetInnerHTML)throw Error(p(91));return A({},b,{value:void 0,defaultValue:void 0,children:\"\"+a._wrapperState.initialValue})}function hb(a,b){var c=b.value;if(null==c){c=b.children;b=b.defaultValue;if(null!=c){if(null!=b)throw Error(p(92));if(eb(c)){if(1<c.length)throw Error(p(93));c=c[0]}b=c}null==b&&(b=\"\");c=b}a._wrapperState={initialValue:Sa(c)}}\nfunction ib(a,b){var c=Sa(b.value),d=Sa(b.defaultValue);null!=c&&(c=\"\"+c,c!==a.value&&(a.value=c),null==b.defaultValue&&a.defaultValue!==c&&(a.defaultValue=c));null!=d&&(a.defaultValue=\"\"+d)}function jb(a){var b=a.textContent;b===a._wrapperState.initialValue&&\"\"!==b&&null!==b&&(a.value=b)}function kb(a){switch(a){case \"svg\":return\"http://www.w3.org/2000/svg\";case \"math\":return\"http://www.w3.org/1998/Math/MathML\";default:return\"http://www.w3.org/1999/xhtml\"}}\nfunction lb(a,b){return null==a||\"http://www.w3.org/1999/xhtml\"===a?kb(b):\"http://www.w3.org/2000/svg\"===a&&\"foreignObject\"===b?\"http://www.w3.org/1999/xhtml\":a}\nvar mb,nb=function(a){return\"undefined\"!==typeof MSApp&&MSApp.execUnsafeLocalFunction?function(b,c,d,e){MSApp.execUnsafeLocalFunction(function(){return a(b,c,d,e)})}:a}(function(a,b){if(\"http://www.w3.org/2000/svg\"!==a.namespaceURI||\"innerHTML\"in a)a.innerHTML=b;else{mb=mb||document.createElement(\"div\");mb.innerHTML=\"<svg>\"+b.valueOf().toString()+\"</svg>\";for(b=mb.firstChild;a.firstChild;)a.removeChild(a.firstChild);for(;b.firstChild;)a.appendChild(b.firstChild)}});\nfunction ob(a,b){if(b){var 
c=a.firstChild;if(c&&c===a.lastChild&&3===c.nodeType){c.nodeValue=b;return}}a.textContent=b}\nvar pb={animationIterationCount:!0,aspectRatio:!0,borderImageOutset:!0,borderImageSlice:!0,borderImageWidth:!0,boxFlex:!0,boxFlexGroup:!0,boxOrdinalGroup:!0,columnCount:!0,columns:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,flexOrder:!0,gridArea:!0,gridRow:!0,gridRowEnd:!0,gridRowSpan:!0,gridRowStart:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnSpan:!0,gridColumnStart:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,\nzoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeDasharray:!0,strokeDashoffset:!0,strokeMiterlimit:!0,strokeOpacity:!0,strokeWidth:!0},qb=[\"Webkit\",\"ms\",\"Moz\",\"O\"];Object.keys(pb).forEach(function(a){qb.forEach(function(b){b=b+a.charAt(0).toUpperCase()+a.substring(1);pb[b]=pb[a]})});function rb(a,b,c){return null==b||\"boolean\"===typeof b||\"\"===b?\"\":c||\"number\"!==typeof b||0===b||pb.hasOwnProperty(a)&&pb[a]?(\"\"+b).trim():b+\"px\"}\nfunction sb(a,b){a=a.style;for(var c in b)if(b.hasOwnProperty(c)){var d=0===c.indexOf(\"--\"),e=rb(c,b[c],d);\"float\"===c&&(c=\"cssFloat\");d?a.setProperty(c,e):a[c]=e}}var tb=A({menuitem:!0},{area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0});\nfunction ub(a,b){if(b){if(tb[a]&&(null!=b.children||null!=b.dangerouslySetInnerHTML))throw Error(p(137,a));if(null!=b.dangerouslySetInnerHTML){if(null!=b.children)throw Error(p(60));if(\"object\"!==typeof b.dangerouslySetInnerHTML||!(\"__html\"in b.dangerouslySetInnerHTML))throw Error(p(61));}if(null!=b.style&&\"object\"!==typeof b.style)throw Error(p(62));}}\nfunction vb(a,b){if(-1===a.indexOf(\"-\"))return\"string\"===typeof b.is;switch(a){case \"annotation-xml\":case \"color-profile\":case \"font-face\":case \"font-face-src\":case \"font-face-uri\":case \"font-face-format\":case \"font-face-name\":case 
\"missing-glyph\":return!1;default:return!0}}var wb=null;function xb(a){a=a.target||a.srcElement||window;a.correspondingUseElement&&(a=a.correspondingUseElement);return 3===a.nodeType?a.parentNode:a}var yb=null,zb=null,Ab=null;\nfunction Bb(a){if(a=Cb(a)){if(\"function\"!==typeof yb)throw Error(p(280));var b=a.stateNode;b&&(b=Db(b),yb(a.stateNode,a.type,b))}}function Eb(a){zb?Ab?Ab.push(a):Ab=[a]:zb=a}function Fb(){if(zb){var a=zb,b=Ab;Ab=zb=null;Bb(a);if(b)for(a=0;a<b.length;a++)Bb(b[a])}}function Gb(a,b){return a(b)}function Hb(){}var Ib=!1;function Jb(a,b,c){if(Ib)return a(b,c);Ib=!0;try{return Gb(a,b,c)}finally{if(Ib=!1,null!==zb||null!==Ab)Hb(),Fb()}}\nfunction Kb(a,b){var c=a.stateNode;if(null===c)return null;var d=Db(c);if(null===d)return null;c=d[b];a:switch(b){case \"onClick\":case \"onClickCapture\":case \"onDoubleClick\":case \"onDoubleClickCapture\":case \"onMouseDown\":case \"onMouseDownCapture\":case \"onMouseMove\":case \"onMouseMoveCapture\":case \"onMouseUp\":case \"onMouseUpCapture\":case \"onMouseEnter\":(d=!d.disabled)||(a=a.type,d=!(\"button\"===a||\"input\"===a||\"select\"===a||\"textarea\"===a));a=!d;break a;default:a=!1}if(a)return null;if(c&&\"function\"!==\ntypeof c)throw Error(p(231,b,typeof c));return c}var Lb=!1;if(ia)try{var Mb={};Object.defineProperty(Mb,\"passive\",{get:function(){Lb=!0}});window.addEventListener(\"test\",Mb,Mb);window.removeEventListener(\"test\",Mb,Mb)}catch(a){Lb=!1}function Nb(a,b,c,d,e,f,g,h,k){var l=Array.prototype.slice.call(arguments,3);try{b.apply(c,l)}catch(m){this.onError(m)}}var Ob=!1,Pb=null,Qb=!1,Rb=null,Sb={onError:function(a){Ob=!0;Pb=a}};function Tb(a,b,c,d,e,f,g,h,k){Ob=!1;Pb=null;Nb.apply(Sb,arguments)}\nfunction Ub(a,b,c,d,e,f,g,h,k){Tb.apply(this,arguments);if(Ob){if(Ob){var l=Pb;Ob=!1;Pb=null}else throw Error(p(198));Qb||(Qb=!0,Rb=l)}}function Vb(a){var b=a,c=a;if(a.alternate)for(;b.return;)b=b.return;else{a=b;do b=a,0!==(b.flags&4098)&&(c=b.return),a=b.return;while(a)}return 
3===b.tag?c:null}function Wb(a){if(13===a.tag){var b=a.memoizedState;null===b&&(a=a.alternate,null!==a&&(b=a.memoizedState));if(null!==b)return b.dehydrated}return null}function Xb(a){if(Vb(a)!==a)throw Error(p(188));}\nfunction Yb(a){var b=a.alternate;if(!b){b=Vb(a);if(null===b)throw Error(p(188));return b!==a?null:a}for(var c=a,d=b;;){var e=c.return;if(null===e)break;var f=e.alternate;if(null===f){d=e.return;if(null!==d){c=d;continue}break}if(e.child===f.child){for(f=e.child;f;){if(f===c)return Xb(e),a;if(f===d)return Xb(e),b;f=f.sibling}throw Error(p(188));}if(c.return!==d.return)c=e,d=f;else{for(var g=!1,h=e.child;h;){if(h===c){g=!0;c=e;d=f;break}if(h===d){g=!0;d=e;c=f;break}h=h.sibling}if(!g){for(h=f.child;h;){if(h===\nc){g=!0;c=f;d=e;break}if(h===d){g=!0;d=f;c=e;break}h=h.sibling}if(!g)throw Error(p(189));}}if(c.alternate!==d)throw Error(p(190));}if(3!==c.tag)throw Error(p(188));return c.stateNode.current===c?a:b}function Zb(a){a=Yb(a);return null!==a?$b(a):null}function $b(a){if(5===a.tag||6===a.tag)return a;for(a=a.child;null!==a;){var b=$b(a);if(null!==b)return b;a=a.sibling}return null}\nvar ac=ca.unstable_scheduleCallback,bc=ca.unstable_cancelCallback,cc=ca.unstable_shouldYield,dc=ca.unstable_requestPaint,B=ca.unstable_now,ec=ca.unstable_getCurrentPriorityLevel,fc=ca.unstable_ImmediatePriority,gc=ca.unstable_UserBlockingPriority,hc=ca.unstable_NormalPriority,ic=ca.unstable_LowPriority,jc=ca.unstable_IdlePriority,kc=null,lc=null;function mc(a){if(lc&&\"function\"===typeof lc.onCommitFiberRoot)try{lc.onCommitFiberRoot(kc,a,void 0,128===(a.current.flags&128))}catch(b){}}\nvar oc=Math.clz32?Math.clz32:nc,pc=Math.log,qc=Math.LN2;function nc(a){a>>>=0;return 0===a?32:31-(pc(a)/qc|0)|0}var rc=64,sc=4194304;\nfunction tc(a){switch(a&-a){case 1:return 1;case 2:return 2;case 4:return 4;case 8:return 8;case 16:return 16;case 32:return 32;case 64:case 128:case 256:case 512:case 1024:case 2048:case 4096:case 8192:case 16384:case 32768:case 65536:case 131072:case 
262144:case 524288:case 1048576:case 2097152:return a&4194240;case 4194304:case 8388608:case 16777216:case 33554432:case 67108864:return a&130023424;case 134217728:return 134217728;case 268435456:return 268435456;case 536870912:return 536870912;case 1073741824:return 1073741824;\ndefault:return a}}function uc(a,b){var c=a.pendingLanes;if(0===c)return 0;var d=0,e=a.suspendedLanes,f=a.pingedLanes,g=c&268435455;if(0!==g){var h=g&~e;0!==h?d=tc(h):(f&=g,0!==f&&(d=tc(f)))}else g=c&~e,0!==g?d=tc(g):0!==f&&(d=tc(f));if(0===d)return 0;if(0!==b&&b!==d&&0===(b&e)&&(e=d&-d,f=b&-b,e>=f||16===e&&0!==(f&4194240)))return b;0!==(d&4)&&(d|=c&16);b=a.entangledLanes;if(0!==b)for(a=a.entanglements,b&=d;0<b;)c=31-oc(b),e=1<<c,d|=a[c],b&=~e;return d}\nfunction vc(a,b){switch(a){case 1:case 2:case 4:return b+250;case 8:case 16:case 32:case 64:case 128:case 256:case 512:case 1024:case 2048:case 4096:case 8192:case 16384:case 32768:case 65536:case 131072:case 262144:case 524288:case 1048576:case 2097152:return b+5E3;case 4194304:case 8388608:case 16777216:case 33554432:case 67108864:return-1;case 134217728:case 268435456:case 536870912:case 1073741824:return-1;default:return-1}}\nfunction wc(a,b){for(var c=a.suspendedLanes,d=a.pingedLanes,e=a.expirationTimes,f=a.pendingLanes;0<f;){var g=31-oc(f),h=1<<g,k=e[g];if(-1===k){if(0===(h&c)||0!==(h&d))e[g]=vc(h,b)}else k<=b&&(a.expiredLanes|=h);f&=~h}}function xc(a){a=a.pendingLanes&-1073741825;return 0!==a?a:a&1073741824?1073741824:0}function yc(){var a=rc;rc<<=1;0===(rc&4194240)&&(rc=64);return a}function zc(a){for(var b=[],c=0;31>c;c++)b.push(a);return b}\nfunction Ac(a,b,c){a.pendingLanes|=b;536870912!==b&&(a.suspendedLanes=0,a.pingedLanes=0);a=a.eventTimes;b=31-oc(b);a[b]=c}function Bc(a,b){var c=a.pendingLanes&~b;a.pendingLanes=b;a.suspendedLanes=0;a.pingedLanes=0;a.expiredLanes&=b;a.mutableReadLanes&=b;a.entangledLanes&=b;b=a.entanglements;var d=a.eventTimes;for(a=a.expirationTimes;0<c;){var 
e=31-oc(c),f=1<<e;b[e]=0;d[e]=-1;a[e]=-1;c&=~f}}\nfunction Cc(a,b){var c=a.entangledLanes|=b;for(a=a.entanglements;c;){var d=31-oc(c),e=1<<d;e&b|a[d]&b&&(a[d]|=b);c&=~e}}var C=0;function Dc(a){a&=-a;return 1<a?4<a?0!==(a&268435455)?16:536870912:4:1}var Ec,Fc,Gc,Hc,Ic,Jc=!1,Kc=[],Lc=null,Mc=null,Nc=null,Oc=new Map,Pc=new Map,Qc=[],Rc=\"mousedown mouseup touchcancel touchend touchstart auxclick dblclick pointercancel pointerdown pointerup dragend dragstart drop compositionend compositionstart keydown keypress keyup input textInput copy cut paste click change contextmenu reset submit\".split(\" \");\nfunction Sc(a,b){switch(a){case \"focusin\":case \"focusout\":Lc=null;break;case \"dragenter\":case \"dragleave\":Mc=null;break;case \"mouseover\":case \"mouseout\":Nc=null;break;case \"pointerover\":case \"pointerout\":Oc.delete(b.pointerId);break;case \"gotpointercapture\":case \"lostpointercapture\":Pc.delete(b.pointerId)}}\nfunction Tc(a,b,c,d,e,f){if(null===a||a.nativeEvent!==f)return a={blockedOn:b,domEventName:c,eventSystemFlags:d,nativeEvent:f,targetContainers:[e]},null!==b&&(b=Cb(b),null!==b&&Fc(b)),a;a.eventSystemFlags|=d;b=a.targetContainers;null!==e&&-1===b.indexOf(e)&&b.push(e);return a}\nfunction Uc(a,b,c,d,e){switch(b){case \"focusin\":return Lc=Tc(Lc,a,b,c,d,e),!0;case \"dragenter\":return Mc=Tc(Mc,a,b,c,d,e),!0;case \"mouseover\":return Nc=Tc(Nc,a,b,c,d,e),!0;case \"pointerover\":var f=e.pointerId;Oc.set(f,Tc(Oc.get(f)||null,a,b,c,d,e));return!0;case \"gotpointercapture\":return f=e.pointerId,Pc.set(f,Tc(Pc.get(f)||null,a,b,c,d,e)),!0}return!1}\nfunction Vc(a){var b=Wc(a.target);if(null!==b){var c=Vb(b);if(null!==c)if(b=c.tag,13===b){if(b=Wb(c),null!==b){a.blockedOn=b;Ic(a.priority,function(){Gc(c)});return}}else if(3===b&&c.stateNode.current.memoizedState.isDehydrated){a.blockedOn=3===c.tag?c.stateNode.containerInfo:null;return}}a.blockedOn=null}\nfunction Xc(a){if(null!==a.blockedOn)return!1;for(var b=a.targetContainers;0<b.length;){var 
c=Yc(a.domEventName,a.eventSystemFlags,b[0],a.nativeEvent);if(null===c){c=a.nativeEvent;var d=new c.constructor(c.type,c);wb=d;c.target.dispatchEvent(d);wb=null}else return b=Cb(c),null!==b&&Fc(b),a.blockedOn=c,!1;b.shift()}return!0}function Zc(a,b,c){Xc(a)&&c.delete(b)}function $c(){Jc=!1;null!==Lc&&Xc(Lc)&&(Lc=null);null!==Mc&&Xc(Mc)&&(Mc=null);null!==Nc&&Xc(Nc)&&(Nc=null);Oc.forEach(Zc);Pc.forEach(Zc)}\nfunction ad(a,b){a.blockedOn===b&&(a.blockedOn=null,Jc||(Jc=!0,ca.unstable_scheduleCallback(ca.unstable_NormalPriority,$c)))}\nfunction bd(a){function b(b){return ad(b,a)}if(0<Kc.length){ad(Kc[0],a);for(var c=1;c<Kc.length;c++){var d=Kc[c];d.blockedOn===a&&(d.blockedOn=null)}}null!==Lc&&ad(Lc,a);null!==Mc&&ad(Mc,a);null!==Nc&&ad(Nc,a);Oc.forEach(b);Pc.forEach(b);for(c=0;c<Qc.length;c++)d=Qc[c],d.blockedOn===a&&(d.blockedOn=null);for(;0<Qc.length&&(c=Qc[0],null===c.blockedOn);)Vc(c),null===c.blockedOn&&Qc.shift()}var cd=ua.ReactCurrentBatchConfig,dd=!0;\nfunction ed(a,b,c,d){var e=C,f=cd.transition;cd.transition=null;try{C=1,fd(a,b,c,d)}finally{C=e,cd.transition=f}}function gd(a,b,c,d){var e=C,f=cd.transition;cd.transition=null;try{C=4,fd(a,b,c,d)}finally{C=e,cd.transition=f}}\nfunction fd(a,b,c,d){if(dd){var e=Yc(a,b,c,d);if(null===e)hd(a,b,d,id,c),Sc(a,d);else if(Uc(e,a,b,c,d))d.stopPropagation();else if(Sc(a,d),b&4&&-1<Rc.indexOf(a)){for(;null!==e;){var f=Cb(e);null!==f&&Ec(f);f=Yc(a,b,c,d);null===f&&hd(a,b,d,id,c);if(f===e)break;e=f}null!==e&&d.stopPropagation()}else hd(a,b,d,null,c)}}var id=null;\nfunction Yc(a,b,c,d){id=null;a=xb(d);a=Wc(a);if(null!==a)if(b=Vb(a),null===b)a=null;else if(c=b.tag,13===c){a=Wb(b);if(null!==a)return a;a=null}else if(3===c){if(b.stateNode.current.memoizedState.isDehydrated)return 3===b.tag?b.stateNode.containerInfo:null;a=null}else b!==a&&(a=null);id=a;return null}\nfunction jd(a){switch(a){case \"cancel\":case \"click\":case \"close\":case \"contextmenu\":case \"copy\":case \"cut\":case \"auxclick\":case \"dblclick\":case 
\"dragend\":case \"dragstart\":case \"drop\":case \"focusin\":case \"focusout\":case \"input\":case \"invalid\":case \"keydown\":case \"keypress\":case \"keyup\":case \"mousedown\":case \"mouseup\":case \"paste\":case \"pause\":case \"play\":case \"pointercancel\":case \"pointerdown\":case \"pointerup\":case \"ratechange\":case \"reset\":case \"resize\":case \"seeked\":case \"submit\":case \"touchcancel\":case \"touchend\":case \"touchstart\":case \"volumechange\":case \"change\":case \"selectionchange\":case \"textInput\":case \"compositionstart\":case \"compositionend\":case \"compositionupdate\":case \"beforeblur\":case \"afterblur\":case \"beforeinput\":case \"blur\":case \"fullscreenchange\":case \"focus\":case \"hashchange\":case \"popstate\":case \"select\":case \"selectstart\":return 1;case \"drag\":case \"dragenter\":case \"dragexit\":case \"dragleave\":case \"dragover\":case \"mousemove\":case \"mouseout\":case \"mouseover\":case \"pointermove\":case \"pointerout\":case \"pointerover\":case \"scroll\":case \"toggle\":case \"touchmove\":case \"wheel\":case \"mouseenter\":case \"mouseleave\":case \"pointerenter\":case \"pointerleave\":return 4;\ncase \"message\":switch(ec()){case fc:return 1;case gc:return 4;case hc:case ic:return 16;case jc:return 536870912;default:return 16}default:return 16}}var kd=null,ld=null,md=null;function nd(){if(md)return md;var a,b=ld,c=b.length,d,e=\"value\"in kd?kd.value:kd.textContent,f=e.length;for(a=0;a<c&&b[a]===e[a];a++);var g=c-a;for(d=1;d<=g&&b[c-d]===e[f-d];d++);return md=e.slice(a,1<d?1-d:void 0)}\nfunction od(a){var b=a.keyCode;\"charCode\"in a?(a=a.charCode,0===a&&13===b&&(a=13)):a=b;10===a&&(a=13);return 32<=a||13===a?a:0}function pd(){return!0}function qd(){return!1}\nfunction rd(a){function b(b,d,e,f,g){this._reactName=b;this._targetInst=e;this.type=d;this.nativeEvent=f;this.target=g;this.currentTarget=null;for(var c in 
a)a.hasOwnProperty(c)&&(b=a[c],this[c]=b?b(f):f[c]);this.isDefaultPrevented=(null!=f.defaultPrevented?f.defaultPrevented:!1===f.returnValue)?pd:qd;this.isPropagationStopped=qd;return this}A(b.prototype,{preventDefault:function(){this.defaultPrevented=!0;var a=this.nativeEvent;a&&(a.preventDefault?a.preventDefault():\"unknown\"!==typeof a.returnValue&&\n(a.returnValue=!1),this.isDefaultPrevented=pd)},stopPropagation:function(){var a=this.nativeEvent;a&&(a.stopPropagation?a.stopPropagation():\"unknown\"!==typeof a.cancelBubble&&(a.cancelBubble=!0),this.isPropagationStopped=pd)},persist:function(){},isPersistent:pd});return b}\nvar sd={eventPhase:0,bubbles:0,cancelable:0,timeStamp:function(a){return a.timeStamp||Date.now()},defaultPrevented:0,isTrusted:0},td=rd(sd),ud=A({},sd,{view:0,detail:0}),vd=rd(ud),wd,xd,yd,Ad=A({},ud,{screenX:0,screenY:0,clientX:0,clientY:0,pageX:0,pageY:0,ctrlKey:0,shiftKey:0,altKey:0,metaKey:0,getModifierState:zd,button:0,buttons:0,relatedTarget:function(a){return void 0===a.relatedTarget?a.fromElement===a.srcElement?a.toElement:a.fromElement:a.relatedTarget},movementX:function(a){if(\"movementX\"in\na)return a.movementX;a!==yd&&(yd&&\"mousemove\"===a.type?(wd=a.screenX-yd.screenX,xd=a.screenY-yd.screenY):xd=wd=0,yd=a);return wd},movementY:function(a){return\"movementY\"in a?a.movementY:xd}}),Bd=rd(Ad),Cd=A({},Ad,{dataTransfer:0}),Dd=rd(Cd),Ed=A({},ud,{relatedTarget:0}),Fd=rd(Ed),Gd=A({},sd,{animationName:0,elapsedTime:0,pseudoElement:0}),Hd=rd(Gd),Id=A({},sd,{clipboardData:function(a){return\"clipboardData\"in a?a.clipboardData:window.clipboardData}}),Jd=rd(Id),Kd=A({},sd,{data:0}),Ld=rd(Kd),Md={Esc:\"Escape\",\nSpacebar:\" 
\",Left:\"ArrowLeft\",Up:\"ArrowUp\",Right:\"ArrowRight\",Down:\"ArrowDown\",Del:\"Delete\",Win:\"OS\",Menu:\"ContextMenu\",Apps:\"ContextMenu\",Scroll:\"ScrollLock\",MozPrintableKey:\"Unidentified\"},Nd={8:\"Backspace\",9:\"Tab\",12:\"Clear\",13:\"Enter\",16:\"Shift\",17:\"Control\",18:\"Alt\",19:\"Pause\",20:\"CapsLock\",27:\"Escape\",32:\" \",33:\"PageUp\",34:\"PageDown\",35:\"End\",36:\"Home\",37:\"ArrowLeft\",38:\"ArrowUp\",39:\"ArrowRight\",40:\"ArrowDown\",45:\"Insert\",46:\"Delete\",112:\"F1\",113:\"F2\",114:\"F3\",115:\"F4\",116:\"F5\",117:\"F6\",118:\"F7\",\n119:\"F8\",120:\"F9\",121:\"F10\",122:\"F11\",123:\"F12\",144:\"NumLock\",145:\"ScrollLock\",224:\"Meta\"},Od={Alt:\"altKey\",Control:\"ctrlKey\",Meta:\"metaKey\",Shift:\"shiftKey\"};function Pd(a){var b=this.nativeEvent;return b.getModifierState?b.getModifierState(a):(a=Od[a])?!!b[a]:!1}function zd(){return Pd}\nvar Qd=A({},ud,{key:function(a){if(a.key){var b=Md[a.key]||a.key;if(\"Unidentified\"!==b)return b}return\"keypress\"===a.type?(a=od(a),13===a?\"Enter\":String.fromCharCode(a)):\"keydown\"===a.type||\"keyup\"===a.type?Nd[a.keyCode]||\"Unidentified\":\"\"},code:0,location:0,ctrlKey:0,shiftKey:0,altKey:0,metaKey:0,repeat:0,locale:0,getModifierState:zd,charCode:function(a){return\"keypress\"===a.type?od(a):0},keyCode:function(a){return\"keydown\"===a.type||\"keyup\"===a.type?a.keyCode:0},which:function(a){return\"keypress\"===\na.type?od(a):\"keydown\"===a.type||\"keyup\"===a.type?a.keyCode:0}}),Rd=rd(Qd),Sd=A({},Ad,{pointerId:0,width:0,height:0,pressure:0,tangentialPressure:0,tiltX:0,tiltY:0,twist:0,pointerType:0,isPrimary:0}),Td=rd(Sd),Ud=A({},ud,{touches:0,targetTouches:0,changedTouches:0,altKey:0,metaKey:0,ctrlKey:0,shiftKey:0,getModifierState:zd}),Vd=rd(Ud),Wd=A({},sd,{propertyName:0,elapsedTime:0,pseudoElement:0}),Xd=rd(Wd),Yd=A({},Ad,{deltaX:function(a){return\"deltaX\"in a?a.deltaX:\"wheelDeltaX\"in a?-a.wheelDeltaX:0},\ndeltaY:function(a){return\"deltaY\"in a?a.deltaY:\"wheelDeltaY\"in 
a?-a.wheelDeltaY:\"wheelDelta\"in a?-a.wheelDelta:0},deltaZ:0,deltaMode:0}),Zd=rd(Yd),$d=[9,13,27,32],ae=ia&&\"CompositionEvent\"in window,be=null;ia&&\"documentMode\"in document&&(be=document.documentMode);var ce=ia&&\"TextEvent\"in window&&!be,de=ia&&(!ae||be&&8<be&&11>=be),ee=String.fromCharCode(32),fe=!1;\nfunction ge(a,b){switch(a){case \"keyup\":return-1!==$d.indexOf(b.keyCode);case \"keydown\":return 229!==b.keyCode;case \"keypress\":case \"mousedown\":case \"focusout\":return!0;default:return!1}}function he(a){a=a.detail;return\"object\"===typeof a&&\"data\"in a?a.data:null}var ie=!1;function je(a,b){switch(a){case \"compositionend\":return he(b);case \"keypress\":if(32!==b.which)return null;fe=!0;return ee;case \"textInput\":return a=b.data,a===ee&&fe?null:a;default:return null}}\nfunction ke(a,b){if(ie)return\"compositionend\"===a||!ae&&ge(a,b)?(a=nd(),md=ld=kd=null,ie=!1,a):null;switch(a){case \"paste\":return null;case \"keypress\":if(!(b.ctrlKey||b.altKey||b.metaKey)||b.ctrlKey&&b.altKey){if(b.char&&1<b.char.length)return b.char;if(b.which)return String.fromCharCode(b.which)}return null;case \"compositionend\":return de&&\"ko\"!==b.locale?null:b.data;default:return null}}\nvar le={color:!0,date:!0,datetime:!0,\"datetime-local\":!0,email:!0,month:!0,number:!0,password:!0,range:!0,search:!0,tel:!0,text:!0,time:!0,url:!0,week:!0};function me(a){var b=a&&a.nodeName&&a.nodeName.toLowerCase();return\"input\"===b?!!le[a.type]:\"textarea\"===b?!0:!1}function ne(a,b,c,d){Eb(d);b=oe(b,\"onChange\");0<b.length&&(c=new td(\"onChange\",\"change\",null,c,d),a.push({event:c,listeners:b}))}var pe=null,qe=null;function re(a){se(a,0)}function te(a){var b=ue(a);if(Wa(b))return a}\nfunction ve(a,b){if(\"change\"===a)return b}var we=!1;if(ia){var xe;if(ia){var ye=\"oninput\"in document;if(!ye){var ze=document.createElement(\"div\");ze.setAttribute(\"oninput\",\"return;\");ye=\"function\"===typeof ze.oninput}xe=ye}else 
xe=!1;we=xe&&(!document.documentMode||9<document.documentMode)}function Ae(){pe&&(pe.detachEvent(\"onpropertychange\",Be),qe=pe=null)}function Be(a){if(\"value\"===a.propertyName&&te(qe)){var b=[];ne(b,qe,a,xb(a));Jb(re,b)}}\nfunction Ce(a,b,c){\"focusin\"===a?(Ae(),pe=b,qe=c,pe.attachEvent(\"onpropertychange\",Be)):\"focusout\"===a&&Ae()}function De(a){if(\"selectionchange\"===a||\"keyup\"===a||\"keydown\"===a)return te(qe)}function Ee(a,b){if(\"click\"===a)return te(b)}function Fe(a,b){if(\"input\"===a||\"change\"===a)return te(b)}function Ge(a,b){return a===b&&(0!==a||1/a===1/b)||a!==a&&b!==b}var He=\"function\"===typeof Object.is?Object.is:Ge;\nfunction Ie(a,b){if(He(a,b))return!0;if(\"object\"!==typeof a||null===a||\"object\"!==typeof b||null===b)return!1;var c=Object.keys(a),d=Object.keys(b);if(c.length!==d.length)return!1;for(d=0;d<c.length;d++){var e=c[d];if(!ja.call(b,e)||!He(a[e],b[e]))return!1}return!0}function Je(a){for(;a&&a.firstChild;)a=a.firstChild;return a}\nfunction Ke(a,b){var c=Je(a);a=0;for(var d;c;){if(3===c.nodeType){d=a+c.textContent.length;if(a<=b&&d>=b)return{node:c,offset:b-a};a=d}a:{for(;c;){if(c.nextSibling){c=c.nextSibling;break a}c=c.parentNode}c=void 0}c=Je(c)}}function Le(a,b){return a&&b?a===b?!0:a&&3===a.nodeType?!1:b&&3===b.nodeType?Le(a,b.parentNode):\"contains\"in a?a.contains(b):a.compareDocumentPosition?!!(a.compareDocumentPosition(b)&16):!1:!1}\nfunction Me(){for(var a=window,b=Xa();b instanceof a.HTMLIFrameElement;){try{var c=\"string\"===typeof b.contentWindow.location.href}catch(d){c=!1}if(c)a=b.contentWindow;else break;b=Xa(a.document)}return b}function Ne(a){var b=a&&a.nodeName&&a.nodeName.toLowerCase();return b&&(\"input\"===b&&(\"text\"===a.type||\"search\"===a.type||\"tel\"===a.type||\"url\"===a.type||\"password\"===a.type)||\"textarea\"===b||\"true\"===a.contentEditable)}\nfunction Oe(a){var 
b=Me(),c=a.focusedElem,d=a.selectionRange;if(b!==c&&c&&c.ownerDocument&&Le(c.ownerDocument.documentElement,c)){if(null!==d&&Ne(c))if(b=d.start,a=d.end,void 0===a&&(a=b),\"selectionStart\"in c)c.selectionStart=b,c.selectionEnd=Math.min(a,c.value.length);else if(a=(b=c.ownerDocument||document)&&b.defaultView||window,a.getSelection){a=a.getSelection();var e=c.textContent.length,f=Math.min(d.start,e);d=void 0===d.end?f:Math.min(d.end,e);!a.extend&&f>d&&(e=d,d=f,f=e);e=Ke(c,f);var g=Ke(c,\nd);e&&g&&(1!==a.rangeCount||a.anchorNode!==e.node||a.anchorOffset!==e.offset||a.focusNode!==g.node||a.focusOffset!==g.offset)&&(b=b.createRange(),b.setStart(e.node,e.offset),a.removeAllRanges(),f>d?(a.addRange(b),a.extend(g.node,g.offset)):(b.setEnd(g.node,g.offset),a.addRange(b)))}b=[];for(a=c;a=a.parentNode;)1===a.nodeType&&b.push({element:a,left:a.scrollLeft,top:a.scrollTop});\"function\"===typeof c.focus&&c.focus();for(c=0;c<b.length;c++)a=b[c],a.element.scrollLeft=a.left,a.element.scrollTop=a.top}}\nvar Pe=ia&&\"documentMode\"in document&&11>=document.documentMode,Qe=null,Re=null,Se=null,Te=!1;\nfunction Ue(a,b,c){var d=c.window===c?c.document:9===c.nodeType?c:c.ownerDocument;Te||null==Qe||Qe!==Xa(d)||(d=Qe,\"selectionStart\"in d&&Ne(d)?d={start:d.selectionStart,end:d.selectionEnd}:(d=(d.ownerDocument&&d.ownerDocument.defaultView||window).getSelection(),d={anchorNode:d.anchorNode,anchorOffset:d.anchorOffset,focusNode:d.focusNode,focusOffset:d.focusOffset}),Se&&Ie(Se,d)||(Se=d,d=oe(Re,\"onSelect\"),0<d.length&&(b=new td(\"onSelect\",\"select\",null,b,c),a.push({event:b,listeners:d}),b.target=Qe)))}\nfunction Ve(a,b){var c={};c[a.toLowerCase()]=b.toLowerCase();c[\"Webkit\"+a]=\"webkit\"+b;c[\"Moz\"+a]=\"moz\"+b;return c}var 
We={animationend:Ve(\"Animation\",\"AnimationEnd\"),animationiteration:Ve(\"Animation\",\"AnimationIteration\"),animationstart:Ve(\"Animation\",\"AnimationStart\"),transitionend:Ve(\"Transition\",\"TransitionEnd\")},Xe={},Ye={};\nia&&(Ye=document.createElement(\"div\").style,\"AnimationEvent\"in window||(delete We.animationend.animation,delete We.animationiteration.animation,delete We.animationstart.animation),\"TransitionEvent\"in window||delete We.transitionend.transition);function Ze(a){if(Xe[a])return Xe[a];if(!We[a])return a;var b=We[a],c;for(c in b)if(b.hasOwnProperty(c)&&c in Ye)return Xe[a]=b[c];return a}var $e=Ze(\"animationend\"),af=Ze(\"animationiteration\"),bf=Ze(\"animationstart\"),cf=Ze(\"transitionend\"),df=new Map,ef=\"abort auxClick cancel canPlay canPlayThrough click close contextMenu copy cut drag dragEnd dragEnter dragExit dragLeave dragOver dragStart drop durationChange emptied encrypted ended error gotPointerCapture input invalid keyDown keyPress keyUp load loadedData loadedMetadata loadStart lostPointerCapture mouseDown mouseMove mouseOut mouseOver mouseUp paste pause play playing pointerCancel pointerDown pointerMove pointerOut pointerOver pointerUp progress rateChange reset resize seeked seeking stalled submit suspend timeUpdate touchCancel touchEnd touchStart volumeChange scroll toggle touchMove waiting wheel\".split(\" \");\nfunction ff(a,b){df.set(a,b);fa(b,[a])}for(var gf=0;gf<ef.length;gf++){var hf=ef[gf],jf=hf.toLowerCase(),kf=hf[0].toUpperCase()+hf.slice(1);ff(jf,\"on\"+kf)}ff($e,\"onAnimationEnd\");ff(af,\"onAnimationIteration\");ff(bf,\"onAnimationStart\");ff(\"dblclick\",\"onDoubleClick\");ff(\"focusin\",\"onFocus\");ff(\"focusout\",\"onBlur\");ff(cf,\"onTransitionEnd\");ha(\"onMouseEnter\",[\"mouseout\",\"mouseover\"]);ha(\"onMouseLeave\",[\"mouseout\",\"mouseover\"]);ha(\"onPointerEnter\",[\"pointerout\",\"pointerover\"]);\nha(\"onPointerLeave\",[\"pointerout\",\"pointerover\"]);fa(\"onChange\",\"change click focusin focusout 
input keydown keyup selectionchange\".split(\" \"));fa(\"onSelect\",\"focusout contextmenu dragend focusin keydown keyup mousedown mouseup selectionchange\".split(\" \"));fa(\"onBeforeInput\",[\"compositionend\",\"keypress\",\"textInput\",\"paste\"]);fa(\"onCompositionEnd\",\"compositionend focusout keydown keypress keyup mousedown\".split(\" \"));fa(\"onCompositionStart\",\"compositionstart focusout keydown keypress keyup mousedown\".split(\" \"));\nfa(\"onCompositionUpdate\",\"compositionupdate focusout keydown keypress keyup mousedown\".split(\" \"));var lf=\"abort canplay canplaythrough durationchange emptied encrypted ended error loadeddata loadedmetadata loadstart pause play playing progress ratechange resize seeked seeking stalled suspend timeupdate volumechange waiting\".split(\" \"),mf=new Set(\"cancel close invalid load scroll toggle\".split(\" \").concat(lf));\nfunction nf(a,b,c){var d=a.type||\"unknown-event\";a.currentTarget=c;Ub(d,b,void 0,a);a.currentTarget=null}\nfunction se(a,b){b=0!==(b&4);for(var c=0;c<a.length;c++){var d=a[c],e=d.event;d=d.listeners;a:{var f=void 0;if(b)for(var g=d.length-1;0<=g;g--){var h=d[g],k=h.instance,l=h.currentTarget;h=h.listener;if(k!==f&&e.isPropagationStopped())break a;nf(e,h,l);f=k}else for(g=0;g<d.length;g++){h=d[g];k=h.instance;l=h.currentTarget;h=h.listener;if(k!==f&&e.isPropagationStopped())break a;nf(e,h,l);f=k}}}if(Qb)throw a=Rb,Qb=!1,Rb=null,a;}\nfunction D(a,b){var c=b[of];void 0===c&&(c=b[of]=new Set);var d=a+\"__bubble\";c.has(d)||(pf(b,a,2,!1),c.add(d))}function qf(a,b,c){var d=0;b&&(d|=4);pf(c,a,d,b)}var rf=\"_reactListening\"+Math.random().toString(36).slice(2);function sf(a){if(!a[rf]){a[rf]=!0;da.forEach(function(b){\"selectionchange\"!==b&&(mf.has(b)||qf(b,!1,a),qf(b,!0,a))});var b=9===a.nodeType?a:a.ownerDocument;null===b||b[rf]||(b[rf]=!0,qf(\"selectionchange\",!1,b))}}\nfunction pf(a,b,c,d){switch(jd(b)){case 1:var e=ed;break;case 4:e=gd;break;default:e=fd}c=e.bind(null,b,c,a);e=void 
0;!Lb||\"touchstart\"!==b&&\"touchmove\"!==b&&\"wheel\"!==b||(e=!0);d?void 0!==e?a.addEventListener(b,c,{capture:!0,passive:e}):a.addEventListener(b,c,!0):void 0!==e?a.addEventListener(b,c,{passive:e}):a.addEventListener(b,c,!1)}\nfunction hd(a,b,c,d,e){var f=d;if(0===(b&1)&&0===(b&2)&&null!==d)a:for(;;){if(null===d)return;var g=d.tag;if(3===g||4===g){var h=d.stateNode.containerInfo;if(h===e||8===h.nodeType&&h.parentNode===e)break;if(4===g)for(g=d.return;null!==g;){var k=g.tag;if(3===k||4===k)if(k=g.stateNode.containerInfo,k===e||8===k.nodeType&&k.parentNode===e)return;g=g.return}for(;null!==h;){g=Wc(h);if(null===g)return;k=g.tag;if(5===k||6===k){d=f=g;continue a}h=h.parentNode}}d=d.return}Jb(function(){var d=f,e=xb(c),g=[];\na:{var h=df.get(a);if(void 0!==h){var k=td,n=a;switch(a){case \"keypress\":if(0===od(c))break a;case \"keydown\":case \"keyup\":k=Rd;break;case \"focusin\":n=\"focus\";k=Fd;break;case \"focusout\":n=\"blur\";k=Fd;break;case \"beforeblur\":case \"afterblur\":k=Fd;break;case \"click\":if(2===c.button)break a;case \"auxclick\":case \"dblclick\":case \"mousedown\":case \"mousemove\":case \"mouseup\":case \"mouseout\":case \"mouseover\":case \"contextmenu\":k=Bd;break;case \"drag\":case \"dragend\":case \"dragenter\":case \"dragexit\":case \"dragleave\":case \"dragover\":case \"dragstart\":case \"drop\":k=\nDd;break;case \"touchcancel\":case \"touchend\":case \"touchmove\":case \"touchstart\":k=Vd;break;case $e:case af:case bf:k=Hd;break;case cf:k=Xd;break;case \"scroll\":k=vd;break;case \"wheel\":k=Zd;break;case \"copy\":case \"cut\":case \"paste\":k=Jd;break;case \"gotpointercapture\":case \"lostpointercapture\":case \"pointercancel\":case \"pointerdown\":case \"pointermove\":case \"pointerout\":case \"pointerover\":case \"pointerup\":k=Td}var t=0!==(b&4),J=!t&&\"scroll\"===a,x=t?null!==h?h+\"Capture\":null:h;t=[];for(var w=d,u;null!==\nw;){u=w;var 
F=u.stateNode;5===u.tag&&null!==F&&(u=F,null!==x&&(F=Kb(w,x),null!=F&&t.push(tf(w,F,u))));if(J)break;w=w.return}0<t.length&&(h=new k(h,n,null,c,e),g.push({event:h,listeners:t}))}}if(0===(b&7)){a:{h=\"mouseover\"===a||\"pointerover\"===a;k=\"mouseout\"===a||\"pointerout\"===a;if(h&&c!==wb&&(n=c.relatedTarget||c.fromElement)&&(Wc(n)||n[uf]))break a;if(k||h){h=e.window===e?e:(h=e.ownerDocument)?h.defaultView||h.parentWindow:window;if(k){if(n=c.relatedTarget||c.toElement,k=d,n=n?Wc(n):null,null!==\nn&&(J=Vb(n),n!==J||5!==n.tag&&6!==n.tag))n=null}else k=null,n=d;if(k!==n){t=Bd;F=\"onMouseLeave\";x=\"onMouseEnter\";w=\"mouse\";if(\"pointerout\"===a||\"pointerover\"===a)t=Td,F=\"onPointerLeave\",x=\"onPointerEnter\",w=\"pointer\";J=null==k?h:ue(k);u=null==n?h:ue(n);h=new t(F,w+\"leave\",k,c,e);h.target=J;h.relatedTarget=u;F=null;Wc(e)===d&&(t=new t(x,w+\"enter\",n,c,e),t.target=u,t.relatedTarget=J,F=t);J=F;if(k&&n)b:{t=k;x=n;w=0;for(u=t;u;u=vf(u))w++;u=0;for(F=x;F;F=vf(F))u++;for(;0<w-u;)t=vf(t),w--;for(;0<u-w;)x=\nvf(x),u--;for(;w--;){if(t===x||null!==x&&t===x.alternate)break b;t=vf(t);x=vf(x)}t=null}else t=null;null!==k&&wf(g,h,k,t,!1);null!==n&&null!==J&&wf(g,J,n,t,!0)}}}a:{h=d?ue(d):window;k=h.nodeName&&h.nodeName.toLowerCase();if(\"select\"===k||\"input\"===k&&\"file\"===h.type)var na=ve;else if(me(h))if(we)na=Fe;else{na=De;var xa=Ce}else(k=h.nodeName)&&\"input\"===k.toLowerCase()&&(\"checkbox\"===h.type||\"radio\"===h.type)&&(na=Ee);if(na&&(na=na(a,d))){ne(g,na,c,e);break a}xa&&xa(a,h,d);\"focusout\"===a&&(xa=h._wrapperState)&&\nxa.controlled&&\"number\"===h.type&&cb(h,\"number\",h.value)}xa=d?ue(d):window;switch(a){case \"focusin\":if(me(xa)||\"true\"===xa.contentEditable)Qe=xa,Re=d,Se=null;break;case \"focusout\":Se=Re=Qe=null;break;case \"mousedown\":Te=!0;break;case \"contextmenu\":case \"mouseup\":case \"dragend\":Te=!1;Ue(g,c,e);break;case \"selectionchange\":if(Pe)break;case \"keydown\":case \"keyup\":Ue(g,c,e)}var $a;if(ae)b:{switch(a){case 
\"compositionstart\":var ba=\"onCompositionStart\";break b;case \"compositionend\":ba=\"onCompositionEnd\";\nbreak b;case \"compositionupdate\":ba=\"onCompositionUpdate\";break b}ba=void 0}else ie?ge(a,c)&&(ba=\"onCompositionEnd\"):\"keydown\"===a&&229===c.keyCode&&(ba=\"onCompositionStart\");ba&&(de&&\"ko\"!==c.locale&&(ie||\"onCompositionStart\"!==ba?\"onCompositionEnd\"===ba&&ie&&($a=nd()):(kd=e,ld=\"value\"in kd?kd.value:kd.textContent,ie=!0)),xa=oe(d,ba),0<xa.length&&(ba=new Ld(ba,a,null,c,e),g.push({event:ba,listeners:xa}),$a?ba.data=$a:($a=he(c),null!==$a&&(ba.data=$a))));if($a=ce?je(a,c):ke(a,c))d=oe(d,\"onBeforeInput\"),\n0<d.length&&(e=new Ld(\"onBeforeInput\",\"beforeinput\",null,c,e),g.push({event:e,listeners:d}),e.data=$a)}se(g,b)})}function tf(a,b,c){return{instance:a,listener:b,currentTarget:c}}function oe(a,b){for(var c=b+\"Capture\",d=[];null!==a;){var e=a,f=e.stateNode;5===e.tag&&null!==f&&(e=f,f=Kb(a,c),null!=f&&d.unshift(tf(a,f,e)),f=Kb(a,b),null!=f&&d.push(tf(a,f,e)));a=a.return}return d}function vf(a){if(null===a)return null;do a=a.return;while(a&&5!==a.tag);return a?a:null}\nfunction wf(a,b,c,d,e){for(var f=b._reactName,g=[];null!==c&&c!==d;){var h=c,k=h.alternate,l=h.stateNode;if(null!==k&&k===d)break;5===h.tag&&null!==l&&(h=l,e?(k=Kb(c,f),null!=k&&g.unshift(tf(c,k,h))):e||(k=Kb(c,f),null!=k&&g.push(tf(c,k,h))));c=c.return}0!==g.length&&a.push({event:b,listeners:g})}var xf=/\\r\\n?/g,yf=/\\u0000|\\uFFFD/g;function zf(a){return(\"string\"===typeof a?a:\"\"+a).replace(xf,\"\\n\").replace(yf,\"\")}function Af(a,b,c){b=zf(b);if(zf(a)!==b&&c)throw Error(p(425));}function Bf(){}\nvar Cf=null,Df=null;function Ef(a,b){return\"textarea\"===a||\"noscript\"===a||\"string\"===typeof b.children||\"number\"===typeof b.children||\"object\"===typeof b.dangerouslySetInnerHTML&&null!==b.dangerouslySetInnerHTML&&null!=b.dangerouslySetInnerHTML.__html}\nvar Ff=\"function\"===typeof setTimeout?setTimeout:void 0,Gf=\"function\"===typeof 
clearTimeout?clearTimeout:void 0,Hf=\"function\"===typeof Promise?Promise:void 0,Jf=\"function\"===typeof queueMicrotask?queueMicrotask:\"undefined\"!==typeof Hf?function(a){return Hf.resolve(null).then(a).catch(If)}:Ff;function If(a){setTimeout(function(){throw a;})}\nfunction Kf(a,b){var c=b,d=0;do{var e=c.nextSibling;a.removeChild(c);if(e&&8===e.nodeType)if(c=e.data,\"/$\"===c){if(0===d){a.removeChild(e);bd(b);return}d--}else\"$\"!==c&&\"$?\"!==c&&\"$!\"!==c||d++;c=e}while(c);bd(b)}function Lf(a){for(;null!=a;a=a.nextSibling){var b=a.nodeType;if(1===b||3===b)break;if(8===b){b=a.data;if(\"$\"===b||\"$!\"===b||\"$?\"===b)break;if(\"/$\"===b)return null}}return a}\nfunction Mf(a){a=a.previousSibling;for(var b=0;a;){if(8===a.nodeType){var c=a.data;if(\"$\"===c||\"$!\"===c||\"$?\"===c){if(0===b)return a;b--}else\"/$\"===c&&b++}a=a.previousSibling}return null}var Nf=Math.random().toString(36).slice(2),Of=\"__reactFiber$\"+Nf,Pf=\"__reactProps$\"+Nf,uf=\"__reactContainer$\"+Nf,of=\"__reactEvents$\"+Nf,Qf=\"__reactListeners$\"+Nf,Rf=\"__reactHandles$\"+Nf;\nfunction Wc(a){var b=a[Of];if(b)return b;for(var c=a.parentNode;c;){if(b=c[uf]||c[Of]){c=b.alternate;if(null!==b.child||null!==c&&null!==c.child)for(a=Mf(a);null!==a;){if(c=a[Of])return c;a=Mf(a)}return b}a=c;c=a.parentNode}return null}function Cb(a){a=a[Of]||a[uf];return!a||5!==a.tag&&6!==a.tag&&13!==a.tag&&3!==a.tag?null:a}function ue(a){if(5===a.tag||6===a.tag)return a.stateNode;throw Error(p(33));}function Db(a){return a[Pf]||null}var Sf=[],Tf=-1;function Uf(a){return{current:a}}\nfunction E(a){0>Tf||(a.current=Sf[Tf],Sf[Tf]=null,Tf--)}function G(a,b){Tf++;Sf[Tf]=a.current;a.current=b}var Vf={},H=Uf(Vf),Wf=Uf(!1),Xf=Vf;function Yf(a,b){var c=a.type.contextTypes;if(!c)return Vf;var d=a.stateNode;if(d&&d.__reactInternalMemoizedUnmaskedChildContext===b)return d.__reactInternalMemoizedMaskedChildContext;var e={},f;for(f in 
c)e[f]=b[f];d&&(a=a.stateNode,a.__reactInternalMemoizedUnmaskedChildContext=b,a.__reactInternalMemoizedMaskedChildContext=e);return e}\nfunction Zf(a){a=a.childContextTypes;return null!==a&&void 0!==a}function $f(){E(Wf);E(H)}function ag(a,b,c){if(H.current!==Vf)throw Error(p(168));G(H,b);G(Wf,c)}function bg(a,b,c){var d=a.stateNode;b=b.childContextTypes;if(\"function\"!==typeof d.getChildContext)return c;d=d.getChildContext();for(var e in d)if(!(e in b))throw Error(p(108,Ra(a)||\"Unknown\",e));return A({},c,d)}\nfunction cg(a){a=(a=a.stateNode)&&a.__reactInternalMemoizedMergedChildContext||Vf;Xf=H.current;G(H,a);G(Wf,Wf.current);return!0}function dg(a,b,c){var d=a.stateNode;if(!d)throw Error(p(169));c?(a=bg(a,b,Xf),d.__reactInternalMemoizedMergedChildContext=a,E(Wf),E(H),G(H,a)):E(Wf);G(Wf,c)}var eg=null,fg=!1,gg=!1;function hg(a){null===eg?eg=[a]:eg.push(a)}function ig(a){fg=!0;hg(a)}\nfunction jg(){if(!gg&&null!==eg){gg=!0;var a=0,b=C;try{var c=eg;for(C=1;a<c.length;a++){var d=c[a];do d=d(!0);while(null!==d)}eg=null;fg=!1}catch(e){throw null!==eg&&(eg=eg.slice(a+1)),ac(fc,jg),e;}finally{C=b,gg=!1}}return null}var kg=[],lg=0,mg=null,ng=0,og=[],pg=0,qg=null,rg=1,sg=\"\";function tg(a,b){kg[lg++]=ng;kg[lg++]=mg;mg=a;ng=b}\nfunction ug(a,b,c){og[pg++]=rg;og[pg++]=sg;og[pg++]=qg;qg=a;var d=rg;a=sg;var e=32-oc(d)-1;d&=~(1<<e);c+=1;var f=32-oc(b)+e;if(30<f){var g=e-e%5;f=(d&(1<<g)-1).toString(32);d>>=g;e-=g;rg=1<<32-oc(b)+e|c<<e|d;sg=f+a}else rg=1<<f|c<<e|d,sg=a}function vg(a){null!==a.return&&(tg(a,1),ug(a,1,0))}function wg(a){for(;a===mg;)mg=kg[--lg],kg[lg]=null,ng=kg[--lg],kg[lg]=null;for(;a===qg;)qg=og[--pg],og[pg]=null,sg=og[--pg],og[pg]=null,rg=og[--pg],og[pg]=null}var xg=null,yg=null,I=!1,zg=null;\nfunction Ag(a,b){var c=Bg(5,null,null,0);c.elementType=\"DELETED\";c.stateNode=b;c.return=a;b=a.deletions;null===b?(a.deletions=[c],a.flags|=16):b.push(c)}\nfunction Cg(a,b){switch(a.tag){case 5:var 
c=a.type;b=1!==b.nodeType||c.toLowerCase()!==b.nodeName.toLowerCase()?null:b;return null!==b?(a.stateNode=b,xg=a,yg=Lf(b.firstChild),!0):!1;case 6:return b=\"\"===a.pendingProps||3!==b.nodeType?null:b,null!==b?(a.stateNode=b,xg=a,yg=null,!0):!1;case 13:return b=8!==b.nodeType?null:b,null!==b?(c=null!==qg?{id:rg,overflow:sg}:null,a.memoizedState={dehydrated:b,treeContext:c,retryLane:1073741824},c=Bg(18,null,null,0),c.stateNode=b,c.return=a,a.child=c,xg=a,yg=\nnull,!0):!1;default:return!1}}function Dg(a){return 0!==(a.mode&1)&&0===(a.flags&128)}function Eg(a){if(I){var b=yg;if(b){var c=b;if(!Cg(a,b)){if(Dg(a))throw Error(p(418));b=Lf(c.nextSibling);var d=xg;b&&Cg(a,b)?Ag(d,c):(a.flags=a.flags&-4097|2,I=!1,xg=a)}}else{if(Dg(a))throw Error(p(418));a.flags=a.flags&-4097|2;I=!1;xg=a}}}function Fg(a){for(a=a.return;null!==a&&5!==a.tag&&3!==a.tag&&13!==a.tag;)a=a.return;xg=a}\nfunction Gg(a){if(a!==xg)return!1;if(!I)return Fg(a),I=!0,!1;var b;(b=3!==a.tag)&&!(b=5!==a.tag)&&(b=a.type,b=\"head\"!==b&&\"body\"!==b&&!Ef(a.type,a.memoizedProps));if(b&&(b=yg)){if(Dg(a))throw Hg(),Error(p(418));for(;b;)Ag(a,b),b=Lf(b.nextSibling)}Fg(a);if(13===a.tag){a=a.memoizedState;a=null!==a?a.dehydrated:null;if(!a)throw Error(p(317));a:{a=a.nextSibling;for(b=0;a;){if(8===a.nodeType){var c=a.data;if(\"/$\"===c){if(0===b){yg=Lf(a.nextSibling);break a}b--}else\"$\"!==c&&\"$!\"!==c&&\"$?\"!==c||b++}a=a.nextSibling}yg=\nnull}}else yg=xg?Lf(a.stateNode.nextSibling):null;return!0}function Hg(){for(var a=yg;a;)a=Lf(a.nextSibling)}function Ig(){yg=xg=null;I=!1}function Jg(a){null===zg?zg=[a]:zg.push(a)}var Kg=ua.ReactCurrentBatchConfig;\nfunction Lg(a,b,c){a=c.ref;if(null!==a&&\"function\"!==typeof a&&\"object\"!==typeof a){if(c._owner){c=c._owner;if(c){if(1!==c.tag)throw Error(p(309));var d=c.stateNode}if(!d)throw Error(p(147,a));var e=d,f=\"\"+a;if(null!==b&&null!==b.ref&&\"function\"===typeof b.ref&&b.ref._stringRef===f)return b.ref;b=function(a){var b=e.refs;null===a?delete 
b[f]:b[f]=a};b._stringRef=f;return b}if(\"string\"!==typeof a)throw Error(p(284));if(!c._owner)throw Error(p(290,a));}return a}\nfunction Mg(a,b){a=Object.prototype.toString.call(b);throw Error(p(31,\"[object Object]\"===a?\"object with keys {\"+Object.keys(b).join(\", \")+\"}\":a));}function Ng(a){var b=a._init;return b(a._payload)}\nfunction Og(a){function b(b,c){if(a){var d=b.deletions;null===d?(b.deletions=[c],b.flags|=16):d.push(c)}}function c(c,d){if(!a)return null;for(;null!==d;)b(c,d),d=d.sibling;return null}function d(a,b){for(a=new Map;null!==b;)null!==b.key?a.set(b.key,b):a.set(b.index,b),b=b.sibling;return a}function e(a,b){a=Pg(a,b);a.index=0;a.sibling=null;return a}function f(b,c,d){b.index=d;if(!a)return b.flags|=1048576,c;d=b.alternate;if(null!==d)return d=d.index,d<c?(b.flags|=2,c):d;b.flags|=2;return c}function g(b){a&&\nnull===b.alternate&&(b.flags|=2);return b}function h(a,b,c,d){if(null===b||6!==b.tag)return b=Qg(c,a.mode,d),b.return=a,b;b=e(b,c);b.return=a;return b}function k(a,b,c,d){var f=c.type;if(f===ya)return m(a,b,c.props.children,d,c.key);if(null!==b&&(b.elementType===f||\"object\"===typeof f&&null!==f&&f.$$typeof===Ha&&Ng(f)===b.type))return d=e(b,c.props),d.ref=Lg(a,b,c),d.return=a,d;d=Rg(c.type,c.key,c.props,null,a.mode,d);d.ref=Lg(a,b,c);d.return=a;return d}function l(a,b,c,d){if(null===b||4!==b.tag||\nb.stateNode.containerInfo!==c.containerInfo||b.stateNode.implementation!==c.implementation)return b=Sg(c,a.mode,d),b.return=a,b;b=e(b,c.children||[]);b.return=a;return b}function m(a,b,c,d,f){if(null===b||7!==b.tag)return b=Tg(c,a.mode,d,f),b.return=a,b;b=e(b,c);b.return=a;return b}function q(a,b,c){if(\"string\"===typeof b&&\"\"!==b||\"number\"===typeof b)return b=Qg(\"\"+b,a.mode,c),b.return=a,b;if(\"object\"===typeof b&&null!==b){switch(b.$$typeof){case va:return c=Rg(b.type,b.key,b.props,null,a.mode,c),\nc.ref=Lg(a,null,b),c.return=a,c;case wa:return b=Sg(b,a.mode,c),b.return=a,b;case Ha:var d=b._init;return 
q(a,d(b._payload),c)}if(eb(b)||Ka(b))return b=Tg(b,a.mode,c,null),b.return=a,b;Mg(a,b)}return null}function r(a,b,c,d){var e=null!==b?b.key:null;if(\"string\"===typeof c&&\"\"!==c||\"number\"===typeof c)return null!==e?null:h(a,b,\"\"+c,d);if(\"object\"===typeof c&&null!==c){switch(c.$$typeof){case va:return c.key===e?k(a,b,c,d):null;case wa:return c.key===e?l(a,b,c,d):null;case Ha:return e=c._init,r(a,\nb,e(c._payload),d)}if(eb(c)||Ka(c))return null!==e?null:m(a,b,c,d,null);Mg(a,c)}return null}function y(a,b,c,d,e){if(\"string\"===typeof d&&\"\"!==d||\"number\"===typeof d)return a=a.get(c)||null,h(b,a,\"\"+d,e);if(\"object\"===typeof d&&null!==d){switch(d.$$typeof){case va:return a=a.get(null===d.key?c:d.key)||null,k(b,a,d,e);case wa:return a=a.get(null===d.key?c:d.key)||null,l(b,a,d,e);case Ha:var f=d._init;return y(a,b,c,f(d._payload),e)}if(eb(d)||Ka(d))return a=a.get(c)||null,m(b,a,d,e,null);Mg(b,d)}return null}\nfunction n(e,g,h,k){for(var l=null,m=null,u=g,w=g=0,x=null;null!==u&&w<h.length;w++){u.index>w?(x=u,u=null):x=u.sibling;var n=r(e,u,h[w],k);if(null===n){null===u&&(u=x);break}a&&u&&null===n.alternate&&b(e,u);g=f(n,g,w);null===m?l=n:m.sibling=n;m=n;u=x}if(w===h.length)return c(e,u),I&&tg(e,w),l;if(null===u){for(;w<h.length;w++)u=q(e,h[w],k),null!==u&&(g=f(u,g,w),null===m?l=u:m.sibling=u,m=u);I&&tg(e,w);return l}for(u=d(e,u);w<h.length;w++)x=y(u,e,w,h[w],k),null!==x&&(a&&null!==x.alternate&&u.delete(null===\nx.key?w:x.key),g=f(x,g,w),null===m?l=x:m.sibling=x,m=x);a&&u.forEach(function(a){return b(e,a)});I&&tg(e,w);return l}function t(e,g,h,k){var l=Ka(h);if(\"function\"!==typeof l)throw Error(p(150));h=l.call(h);if(null==h)throw Error(p(151));for(var u=l=null,m=g,w=g=0,x=null,n=h.next();null!==m&&!n.done;w++,n=h.next()){m.index>w?(x=m,m=null):x=m.sibling;var t=r(e,m,n.value,k);if(null===t){null===m&&(m=x);break}a&&m&&null===t.alternate&&b(e,m);g=f(t,g,w);null===u?l=t:u.sibling=t;u=t;m=x}if(n.done)return 
c(e,\nm),I&&tg(e,w),l;if(null===m){for(;!n.done;w++,n=h.next())n=q(e,n.value,k),null!==n&&(g=f(n,g,w),null===u?l=n:u.sibling=n,u=n);I&&tg(e,w);return l}for(m=d(e,m);!n.done;w++,n=h.next())n=y(m,e,w,n.value,k),null!==n&&(a&&null!==n.alternate&&m.delete(null===n.key?w:n.key),g=f(n,g,w),null===u?l=n:u.sibling=n,u=n);a&&m.forEach(function(a){return b(e,a)});I&&tg(e,w);return l}function J(a,d,f,h){\"object\"===typeof f&&null!==f&&f.type===ya&&null===f.key&&(f=f.props.children);if(\"object\"===typeof f&&null!==f){switch(f.$$typeof){case va:a:{for(var k=\nf.key,l=d;null!==l;){if(l.key===k){k=f.type;if(k===ya){if(7===l.tag){c(a,l.sibling);d=e(l,f.props.children);d.return=a;a=d;break a}}else if(l.elementType===k||\"object\"===typeof k&&null!==k&&k.$$typeof===Ha&&Ng(k)===l.type){c(a,l.sibling);d=e(l,f.props);d.ref=Lg(a,l,f);d.return=a;a=d;break a}c(a,l);break}else b(a,l);l=l.sibling}f.type===ya?(d=Tg(f.props.children,a.mode,h,f.key),d.return=a,a=d):(h=Rg(f.type,f.key,f.props,null,a.mode,h),h.ref=Lg(a,d,f),h.return=a,a=h)}return g(a);case wa:a:{for(l=f.key;null!==\nd;){if(d.key===l)if(4===d.tag&&d.stateNode.containerInfo===f.containerInfo&&d.stateNode.implementation===f.implementation){c(a,d.sibling);d=e(d,f.children||[]);d.return=a;a=d;break a}else{c(a,d);break}else b(a,d);d=d.sibling}d=Sg(f,a.mode,h);d.return=a;a=d}return g(a);case Ha:return l=f._init,J(a,d,l(f._payload),h)}if(eb(f))return n(a,d,f,h);if(Ka(f))return t(a,d,f,h);Mg(a,f)}return\"string\"===typeof f&&\"\"!==f||\"number\"===typeof f?(f=\"\"+f,null!==d&&6===d.tag?(c(a,d.sibling),d=e(d,f),d.return=a,a=d):\n(c(a,d),d=Qg(f,a.mode,h),d.return=a,a=d),g(a)):c(a,d)}return J}var Ug=Og(!0),Vg=Og(!1),Wg=Uf(null),Xg=null,Yg=null,Zg=null;function $g(){Zg=Yg=Xg=null}function ah(a){var b=Wg.current;E(Wg);a._currentValue=b}function bh(a,b,c){for(;null!==a;){var d=a.alternate;(a.childLanes&b)!==b?(a.childLanes|=b,null!==d&&(d.childLanes|=b)):null!==d&&(d.childLanes&b)!==b&&(d.childLanes|=b);if(a===c)break;a=a.return}}\nfunction 
ch(a,b){Xg=a;Zg=Yg=null;a=a.dependencies;null!==a&&null!==a.firstContext&&(0!==(a.lanes&b)&&(dh=!0),a.firstContext=null)}function eh(a){var b=a._currentValue;if(Zg!==a)if(a={context:a,memoizedValue:b,next:null},null===Yg){if(null===Xg)throw Error(p(308));Yg=a;Xg.dependencies={lanes:0,firstContext:a}}else Yg=Yg.next=a;return b}var fh=null;function gh(a){null===fh?fh=[a]:fh.push(a)}\nfunction hh(a,b,c,d){var e=b.interleaved;null===e?(c.next=c,gh(b)):(c.next=e.next,e.next=c);b.interleaved=c;return ih(a,d)}function ih(a,b){a.lanes|=b;var c=a.alternate;null!==c&&(c.lanes|=b);c=a;for(a=a.return;null!==a;)a.childLanes|=b,c=a.alternate,null!==c&&(c.childLanes|=b),c=a,a=a.return;return 3===c.tag?c.stateNode:null}var jh=!1;function kh(a){a.updateQueue={baseState:a.memoizedState,firstBaseUpdate:null,lastBaseUpdate:null,shared:{pending:null,interleaved:null,lanes:0},effects:null}}\nfunction lh(a,b){a=a.updateQueue;b.updateQueue===a&&(b.updateQueue={baseState:a.baseState,firstBaseUpdate:a.firstBaseUpdate,lastBaseUpdate:a.lastBaseUpdate,shared:a.shared,effects:a.effects})}function mh(a,b){return{eventTime:a,lane:b,tag:0,payload:null,callback:null,next:null}}\nfunction nh(a,b,c){var d=a.updateQueue;if(null===d)return null;d=d.shared;if(0!==(K&2)){var e=d.pending;null===e?b.next=b:(b.next=e.next,e.next=b);d.pending=b;return ih(a,c)}e=d.interleaved;null===e?(b.next=b,gh(d)):(b.next=e.next,e.next=b);d.interleaved=b;return ih(a,c)}function oh(a,b,c){b=b.updateQueue;if(null!==b&&(b=b.shared,0!==(c&4194240))){var d=b.lanes;d&=a.pendingLanes;c|=d;b.lanes=c;Cc(a,c)}}\nfunction ph(a,b){var c=a.updateQueue,d=a.alternate;if(null!==d&&(d=d.updateQueue,c===d)){var e=null,f=null;c=c.firstBaseUpdate;if(null!==c){do{var g={eventTime:c.eventTime,lane:c.lane,tag:c.tag,payload:c.payload,callback:c.callback,next:null};null===f?e=f=g:f=f.next=g;c=c.next}while(null!==c);null===f?e=f=b:f=f.next=b}else 
e=f=b;c={baseState:d.baseState,firstBaseUpdate:e,lastBaseUpdate:f,shared:d.shared,effects:d.effects};a.updateQueue=c;return}a=c.lastBaseUpdate;null===a?c.firstBaseUpdate=b:a.next=\nb;c.lastBaseUpdate=b}\nfunction qh(a,b,c,d){var e=a.updateQueue;jh=!1;var f=e.firstBaseUpdate,g=e.lastBaseUpdate,h=e.shared.pending;if(null!==h){e.shared.pending=null;var k=h,l=k.next;k.next=null;null===g?f=l:g.next=l;g=k;var m=a.alternate;null!==m&&(m=m.updateQueue,h=m.lastBaseUpdate,h!==g&&(null===h?m.firstBaseUpdate=l:h.next=l,m.lastBaseUpdate=k))}if(null!==f){var q=e.baseState;g=0;m=l=k=null;h=f;do{var r=h.lane,y=h.eventTime;if((d&r)===r){null!==m&&(m=m.next={eventTime:y,lane:0,tag:h.tag,payload:h.payload,callback:h.callback,\nnext:null});a:{var n=a,t=h;r=b;y=c;switch(t.tag){case 1:n=t.payload;if(\"function\"===typeof n){q=n.call(y,q,r);break a}q=n;break a;case 3:n.flags=n.flags&-65537|128;case 0:n=t.payload;r=\"function\"===typeof n?n.call(y,q,r):n;if(null===r||void 0===r)break a;q=A({},q,r);break a;case 2:jh=!0}}null!==h.callback&&0!==h.lane&&(a.flags|=64,r=e.effects,null===r?e.effects=[h]:r.push(h))}else y={eventTime:y,lane:r,tag:h.tag,payload:h.payload,callback:h.callback,next:null},null===m?(l=m=y,k=q):m=m.next=y,g|=r;\nh=h.next;if(null===h)if(h=e.shared.pending,null===h)break;else r=h,h=r.next,r.next=null,e.lastBaseUpdate=r,e.shared.pending=null}while(1);null===m&&(k=q);e.baseState=k;e.firstBaseUpdate=l;e.lastBaseUpdate=m;b=e.shared.interleaved;if(null!==b){e=b;do g|=e.lane,e=e.next;while(e!==b)}else null===f&&(e.shared.lanes=0);rh|=g;a.lanes=g;a.memoizedState=q}}\nfunction sh(a,b,c){a=b.effects;b.effects=null;if(null!==a)for(b=0;b<a.length;b++){var d=a[b],e=d.callback;if(null!==e){d.callback=null;d=c;if(\"function\"!==typeof e)throw Error(p(191,e));e.call(d)}}}var th={},uh=Uf(th),vh=Uf(th),wh=Uf(th);function xh(a){if(a===th)throw Error(p(174));return a}\nfunction yh(a,b){G(wh,b);G(vh,a);G(uh,th);a=b.nodeType;switch(a){case 9:case 
11:b=(b=b.documentElement)?b.namespaceURI:lb(null,\"\");break;default:a=8===a?b.parentNode:b,b=a.namespaceURI||null,a=a.tagName,b=lb(b,a)}E(uh);G(uh,b)}function zh(){E(uh);E(vh);E(wh)}function Ah(a){xh(wh.current);var b=xh(uh.current);var c=lb(b,a.type);b!==c&&(G(vh,a),G(uh,c))}function Bh(a){vh.current===a&&(E(uh),E(vh))}var L=Uf(0);\nfunction Ch(a){for(var b=a;null!==b;){if(13===b.tag){var c=b.memoizedState;if(null!==c&&(c=c.dehydrated,null===c||\"$?\"===c.data||\"$!\"===c.data))return b}else if(19===b.tag&&void 0!==b.memoizedProps.revealOrder){if(0!==(b.flags&128))return b}else if(null!==b.child){b.child.return=b;b=b.child;continue}if(b===a)break;for(;null===b.sibling;){if(null===b.return||b.return===a)return null;b=b.return}b.sibling.return=b.return;b=b.sibling}return null}var Dh=[];\nfunction Eh(){for(var a=0;a<Dh.length;a++)Dh[a]._workInProgressVersionPrimary=null;Dh.length=0}var Fh=ua.ReactCurrentDispatcher,Gh=ua.ReactCurrentBatchConfig,Hh=0,M=null,N=null,O=null,Ih=!1,Jh=!1,Kh=0,Lh=0;function P(){throw Error(p(321));}function Mh(a,b){if(null===b)return!1;for(var c=0;c<b.length&&c<a.length;c++)if(!He(a[c],b[c]))return!1;return!0}\nfunction Nh(a,b,c,d,e,f){Hh=f;M=b;b.memoizedState=null;b.updateQueue=null;b.lanes=0;Fh.current=null===a||null===a.memoizedState?Oh:Ph;a=c(d,e);if(Jh){f=0;do{Jh=!1;Kh=0;if(25<=f)throw Error(p(301));f+=1;O=N=null;b.updateQueue=null;Fh.current=Qh;a=c(d,e)}while(Jh)}Fh.current=Rh;b=null!==N&&null!==N.next;Hh=0;O=N=M=null;Ih=!1;if(b)throw Error(p(300));return a}function Sh(){var a=0!==Kh;Kh=0;return a}\nfunction Th(){var a={memoizedState:null,baseState:null,baseQueue:null,queue:null,next:null};null===O?M.memoizedState=O=a:O=O.next=a;return O}function Uh(){if(null===N){var a=M.alternate;a=null!==a?a.memoizedState:null}else a=N.next;var b=null===O?M.memoizedState:O.next;if(null!==b)O=b,N=a;else{if(null===a)throw 
Error(p(310));N=a;a={memoizedState:N.memoizedState,baseState:N.baseState,baseQueue:N.baseQueue,queue:N.queue,next:null};null===O?M.memoizedState=O=a:O=O.next=a}return O}\nfunction Vh(a,b){return\"function\"===typeof b?b(a):b}\nfunction Wh(a){var b=Uh(),c=b.queue;if(null===c)throw Error(p(311));c.lastRenderedReducer=a;var d=N,e=d.baseQueue,f=c.pending;if(null!==f){if(null!==e){var g=e.next;e.next=f.next;f.next=g}d.baseQueue=e=f;c.pending=null}if(null!==e){f=e.next;d=d.baseState;var h=g=null,k=null,l=f;do{var m=l.lane;if((Hh&m)===m)null!==k&&(k=k.next={lane:0,action:l.action,hasEagerState:l.hasEagerState,eagerState:l.eagerState,next:null}),d=l.hasEagerState?l.eagerState:a(d,l.action);else{var q={lane:m,action:l.action,hasEagerState:l.hasEagerState,\neagerState:l.eagerState,next:null};null===k?(h=k=q,g=d):k=k.next=q;M.lanes|=m;rh|=m}l=l.next}while(null!==l&&l!==f);null===k?g=d:k.next=h;He(d,b.memoizedState)||(dh=!0);b.memoizedState=d;b.baseState=g;b.baseQueue=k;c.lastRenderedState=d}a=c.interleaved;if(null!==a){e=a;do f=e.lane,M.lanes|=f,rh|=f,e=e.next;while(e!==a)}else null===e&&(c.lanes=0);return[b.memoizedState,c.dispatch]}\nfunction Xh(a){var b=Uh(),c=b.queue;if(null===c)throw Error(p(311));c.lastRenderedReducer=a;var d=c.dispatch,e=c.pending,f=b.memoizedState;if(null!==e){c.pending=null;var g=e=e.next;do f=a(f,g.action),g=g.next;while(g!==e);He(f,b.memoizedState)||(dh=!0);b.memoizedState=f;null===b.baseQueue&&(b.baseState=f);c.lastRenderedState=f}return[f,d]}function Yh(){}\nfunction Zh(a,b){var c=M,d=Uh(),e=b(),f=!He(d.memoizedState,e);f&&(d.memoizedState=e,dh=!0);d=d.queue;$h(ai.bind(null,c,d,a),[a]);if(d.getSnapshot!==b||f||null!==O&&O.memoizedState.tag&1){c.flags|=2048;bi(9,ci.bind(null,c,d,e,b),void 0,null);if(null===Q)throw Error(p(349));0!==(Hh&30)||di(c,b,e)}return e}function 
di(a,b,c){a.flags|=16384;a={getSnapshot:b,value:c};b=M.updateQueue;null===b?(b={lastEffect:null,stores:null},M.updateQueue=b,b.stores=[a]):(c=b.stores,null===c?b.stores=[a]:c.push(a))}\nfunction ci(a,b,c,d){b.value=c;b.getSnapshot=d;ei(b)&&fi(a)}function ai(a,b,c){return c(function(){ei(b)&&fi(a)})}function ei(a){var b=a.getSnapshot;a=a.value;try{var c=b();return!He(a,c)}catch(d){return!0}}function fi(a){var b=ih(a,1);null!==b&&gi(b,a,1,-1)}\nfunction hi(a){var b=Th();\"function\"===typeof a&&(a=a());b.memoizedState=b.baseState=a;a={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:Vh,lastRenderedState:a};b.queue=a;a=a.dispatch=ii.bind(null,M,a);return[b.memoizedState,a]}\nfunction bi(a,b,c,d){a={tag:a,create:b,destroy:c,deps:d,next:null};b=M.updateQueue;null===b?(b={lastEffect:null,stores:null},M.updateQueue=b,b.lastEffect=a.next=a):(c=b.lastEffect,null===c?b.lastEffect=a.next=a:(d=c.next,c.next=a,a.next=d,b.lastEffect=a));return a}function ji(){return Uh().memoizedState}function ki(a,b,c,d){var e=Th();M.flags|=a;e.memoizedState=bi(1|b,c,void 0,void 0===d?null:d)}\nfunction li(a,b,c,d){var e=Uh();d=void 0===d?null:d;var f=void 0;if(null!==N){var g=N.memoizedState;f=g.destroy;if(null!==d&&Mh(d,g.deps)){e.memoizedState=bi(b,c,f,d);return}}M.flags|=a;e.memoizedState=bi(1|b,c,f,d)}function mi(a,b){return ki(8390656,8,a,b)}function $h(a,b){return li(2048,8,a,b)}function ni(a,b){return li(4,2,a,b)}function oi(a,b){return li(4,4,a,b)}\nfunction pi(a,b){if(\"function\"===typeof b)return a=a(),b(a),function(){b(null)};if(null!==b&&void 0!==b)return a=a(),b.current=a,function(){b.current=null}}function qi(a,b,c){c=null!==c&&void 0!==c?c.concat([a]):null;return li(4,4,pi.bind(null,b,a),c)}function ri(){}function si(a,b){var c=Uh();b=void 0===b?null:b;var d=c.memoizedState;if(null!==d&&null!==b&&Mh(b,d[1]))return d[0];c.memoizedState=[a,b];return a}\nfunction ti(a,b){var c=Uh();b=void 0===b?null:b;var 
d=c.memoizedState;if(null!==d&&null!==b&&Mh(b,d[1]))return d[0];a=a();c.memoizedState=[a,b];return a}function ui(a,b,c){if(0===(Hh&21))return a.baseState&&(a.baseState=!1,dh=!0),a.memoizedState=c;He(c,b)||(c=yc(),M.lanes|=c,rh|=c,a.baseState=!0);return b}function vi(a,b){var c=C;C=0!==c&&4>c?c:4;a(!0);var d=Gh.transition;Gh.transition={};try{a(!1),b()}finally{C=c,Gh.transition=d}}function wi(){return Uh().memoizedState}\nfunction xi(a,b,c){var d=yi(a);c={lane:d,action:c,hasEagerState:!1,eagerState:null,next:null};if(zi(a))Ai(b,c);else if(c=hh(a,b,c,d),null!==c){var e=R();gi(c,a,d,e);Bi(c,b,d)}}\nfunction ii(a,b,c){var d=yi(a),e={lane:d,action:c,hasEagerState:!1,eagerState:null,next:null};if(zi(a))Ai(b,e);else{var f=a.alternate;if(0===a.lanes&&(null===f||0===f.lanes)&&(f=b.lastRenderedReducer,null!==f))try{var g=b.lastRenderedState,h=f(g,c);e.hasEagerState=!0;e.eagerState=h;if(He(h,g)){var k=b.interleaved;null===k?(e.next=e,gh(b)):(e.next=k.next,k.next=e);b.interleaved=e;return}}catch(l){}finally{}c=hh(a,b,e,d);null!==c&&(e=R(),gi(c,a,d,e),Bi(c,b,d))}}\nfunction zi(a){var b=a.alternate;return a===M||null!==b&&b===M}function Ai(a,b){Jh=Ih=!0;var c=a.pending;null===c?b.next=b:(b.next=c.next,c.next=b);a.pending=b}function Bi(a,b,c){if(0!==(c&4194240)){var d=b.lanes;d&=a.pendingLanes;c|=d;b.lanes=c;Cc(a,c)}}\nvar Rh={readContext:eh,useCallback:P,useContext:P,useEffect:P,useImperativeHandle:P,useInsertionEffect:P,useLayoutEffect:P,useMemo:P,useReducer:P,useRef:P,useState:P,useDebugValue:P,useDeferredValue:P,useTransition:P,useMutableSource:P,useSyncExternalStore:P,useId:P,unstable_isNewReconciler:!1},Oh={readContext:eh,useCallback:function(a,b){Th().memoizedState=[a,void 0===b?null:b];return a},useContext:eh,useEffect:mi,useImperativeHandle:function(a,b,c){c=null!==c&&void 0!==c?c.concat([a]):null;return ki(4194308,\n4,pi.bind(null,b,a),c)},useLayoutEffect:function(a,b){return ki(4194308,4,a,b)},useInsertionEffect:function(a,b){return 
ki(4,2,a,b)},useMemo:function(a,b){var c=Th();b=void 0===b?null:b;a=a();c.memoizedState=[a,b];return a},useReducer:function(a,b,c){var d=Th();b=void 0!==c?c(b):b;d.memoizedState=d.baseState=b;a={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:a,lastRenderedState:b};d.queue=a;a=a.dispatch=xi.bind(null,M,a);return[d.memoizedState,a]},useRef:function(a){var b=\nTh();a={current:a};return b.memoizedState=a},useState:hi,useDebugValue:ri,useDeferredValue:function(a){return Th().memoizedState=a},useTransition:function(){var a=hi(!1),b=a[0];a=vi.bind(null,a[1]);Th().memoizedState=a;return[b,a]},useMutableSource:function(){},useSyncExternalStore:function(a,b,c){var d=M,e=Th();if(I){if(void 0===c)throw Error(p(407));c=c()}else{c=b();if(null===Q)throw Error(p(349));0!==(Hh&30)||di(d,b,c)}e.memoizedState=c;var f={value:c,getSnapshot:b};e.queue=f;mi(ai.bind(null,d,\nf,a),[a]);d.flags|=2048;bi(9,ci.bind(null,d,f,c,b),void 0,null);return c},useId:function(){var a=Th(),b=Q.identifierPrefix;if(I){var c=sg;var d=rg;c=(d&~(1<<32-oc(d)-1)).toString(32)+c;b=\":\"+b+\"R\"+c;c=Kh++;0<c&&(b+=\"H\"+c.toString(32));b+=\":\"}else c=Lh++,b=\":\"+b+\"r\"+c.toString(32)+\":\";return a.memoizedState=b},unstable_isNewReconciler:!1},Ph={readContext:eh,useCallback:si,useContext:eh,useEffect:$h,useImperativeHandle:qi,useInsertionEffect:ni,useLayoutEffect:oi,useMemo:ti,useReducer:Wh,useRef:ji,useState:function(){return Wh(Vh)},\nuseDebugValue:ri,useDeferredValue:function(a){var b=Uh();return ui(b,N.memoizedState,a)},useTransition:function(){var a=Wh(Vh)[0],b=Uh().memoizedState;return[a,b]},useMutableSource:Yh,useSyncExternalStore:Zh,useId:wi,unstable_isNewReconciler:!1},Qh={readContext:eh,useCallback:si,useContext:eh,useEffect:$h,useImperativeHandle:qi,useInsertionEffect:ni,useLayoutEffect:oi,useMemo:ti,useReducer:Xh,useRef:ji,useState:function(){return Xh(Vh)},useDebugValue:ri,useDeferredValue:function(a){var b=Uh();return 
null===\nN?b.memoizedState=a:ui(b,N.memoizedState,a)},useTransition:function(){var a=Xh(Vh)[0],b=Uh().memoizedState;return[a,b]},useMutableSource:Yh,useSyncExternalStore:Zh,useId:wi,unstable_isNewReconciler:!1};function Ci(a,b){if(a&&a.defaultProps){b=A({},b);a=a.defaultProps;for(var c in a)void 0===b[c]&&(b[c]=a[c]);return b}return b}function Di(a,b,c,d){b=a.memoizedState;c=c(d,b);c=null===c||void 0===c?b:A({},b,c);a.memoizedState=c;0===a.lanes&&(a.updateQueue.baseState=c)}\nvar Ei={isMounted:function(a){return(a=a._reactInternals)?Vb(a)===a:!1},enqueueSetState:function(a,b,c){a=a._reactInternals;var d=R(),e=yi(a),f=mh(d,e);f.payload=b;void 0!==c&&null!==c&&(f.callback=c);b=nh(a,f,e);null!==b&&(gi(b,a,e,d),oh(b,a,e))},enqueueReplaceState:function(a,b,c){a=a._reactInternals;var d=R(),e=yi(a),f=mh(d,e);f.tag=1;f.payload=b;void 0!==c&&null!==c&&(f.callback=c);b=nh(a,f,e);null!==b&&(gi(b,a,e,d),oh(b,a,e))},enqueueForceUpdate:function(a,b){a=a._reactInternals;var c=R(),d=\nyi(a),e=mh(c,d);e.tag=2;void 0!==b&&null!==b&&(e.callback=b);b=nh(a,e,d);null!==b&&(gi(b,a,d,c),oh(b,a,d))}};function Fi(a,b,c,d,e,f,g){a=a.stateNode;return\"function\"===typeof a.shouldComponentUpdate?a.shouldComponentUpdate(d,f,g):b.prototype&&b.prototype.isPureReactComponent?!Ie(c,d)||!Ie(e,f):!0}\nfunction Gi(a,b,c){var d=!1,e=Vf;var f=b.contextType;\"object\"===typeof f&&null!==f?f=eh(f):(e=Zf(b)?Xf:H.current,d=b.contextTypes,f=(d=null!==d&&void 0!==d)?Yf(a,e):Vf);b=new b(c,f);a.memoizedState=null!==b.state&&void 0!==b.state?b.state:null;b.updater=Ei;a.stateNode=b;b._reactInternals=a;d&&(a=a.stateNode,a.__reactInternalMemoizedUnmaskedChildContext=e,a.__reactInternalMemoizedMaskedChildContext=f);return b}\nfunction Hi(a,b,c,d){a=b.state;\"function\"===typeof b.componentWillReceiveProps&&b.componentWillReceiveProps(c,d);\"function\"===typeof b.UNSAFE_componentWillReceiveProps&&b.UNSAFE_componentWillReceiveProps(c,d);b.state!==a&&Ei.enqueueReplaceState(b,b.state,null)}\nfunction Ii(a,b,c,d){var 
e=a.stateNode;e.props=c;e.state=a.memoizedState;e.refs={};kh(a);var f=b.contextType;\"object\"===typeof f&&null!==f?e.context=eh(f):(f=Zf(b)?Xf:H.current,e.context=Yf(a,f));e.state=a.memoizedState;f=b.getDerivedStateFromProps;\"function\"===typeof f&&(Di(a,b,f,c),e.state=a.memoizedState);\"function\"===typeof b.getDerivedStateFromProps||\"function\"===typeof e.getSnapshotBeforeUpdate||\"function\"!==typeof e.UNSAFE_componentWillMount&&\"function\"!==typeof e.componentWillMount||(b=e.state,\n\"function\"===typeof e.componentWillMount&&e.componentWillMount(),\"function\"===typeof e.UNSAFE_componentWillMount&&e.UNSAFE_componentWillMount(),b!==e.state&&Ei.enqueueReplaceState(e,e.state,null),qh(a,c,e,d),e.state=a.memoizedState);\"function\"===typeof e.componentDidMount&&(a.flags|=4194308)}function Ji(a,b){try{var c=\"\",d=b;do c+=Pa(d),d=d.return;while(d);var e=c}catch(f){e=\"\\nError generating stack: \"+f.message+\"\\n\"+f.stack}return{value:a,source:b,stack:e,digest:null}}\nfunction Ki(a,b,c){return{value:a,source:null,stack:null!=c?c:null,digest:null!=b?b:null}}function Li(a,b){try{console.error(b.value)}catch(c){setTimeout(function(){throw c;})}}var Mi=\"function\"===typeof WeakMap?WeakMap:Map;function Ni(a,b,c){c=mh(-1,c);c.tag=3;c.payload={element:null};var d=b.value;c.callback=function(){Oi||(Oi=!0,Pi=d);Li(a,b)};return c}\nfunction Qi(a,b,c){c=mh(-1,c);c.tag=3;var d=a.type.getDerivedStateFromError;if(\"function\"===typeof d){var e=b.value;c.payload=function(){return d(e)};c.callback=function(){Li(a,b)}}var f=a.stateNode;null!==f&&\"function\"===typeof f.componentDidCatch&&(c.callback=function(){Li(a,b);\"function\"!==typeof d&&(null===Ri?Ri=new Set([this]):Ri.add(this));var c=b.stack;this.componentDidCatch(b.value,{componentStack:null!==c?c:\"\"})});return c}\nfunction Si(a,b,c){var d=a.pingCache;if(null===d){d=a.pingCache=new Mi;var e=new Set;d.set(b,e)}else e=d.get(b),void 0===e&&(e=new 
Set,d.set(b,e));e.has(c)||(e.add(c),a=Ti.bind(null,a,b,c),b.then(a,a))}function Ui(a){do{var b;if(b=13===a.tag)b=a.memoizedState,b=null!==b?null!==b.dehydrated?!0:!1:!0;if(b)return a;a=a.return}while(null!==a);return null}\nfunction Vi(a,b,c,d,e){if(0===(a.mode&1))return a===b?a.flags|=65536:(a.flags|=128,c.flags|=131072,c.flags&=-52805,1===c.tag&&(null===c.alternate?c.tag=17:(b=mh(-1,1),b.tag=2,nh(c,b,1))),c.lanes|=1),a;a.flags|=65536;a.lanes=e;return a}var Wi=ua.ReactCurrentOwner,dh=!1;function Xi(a,b,c,d){b.child=null===a?Vg(b,null,c,d):Ug(b,a.child,c,d)}\nfunction Yi(a,b,c,d,e){c=c.render;var f=b.ref;ch(b,e);d=Nh(a,b,c,d,f,e);c=Sh();if(null!==a&&!dh)return b.updateQueue=a.updateQueue,b.flags&=-2053,a.lanes&=~e,Zi(a,b,e);I&&c&&vg(b);b.flags|=1;Xi(a,b,d,e);return b.child}\nfunction $i(a,b,c,d,e){if(null===a){var f=c.type;if(\"function\"===typeof f&&!aj(f)&&void 0===f.defaultProps&&null===c.compare&&void 0===c.defaultProps)return b.tag=15,b.type=f,bj(a,b,f,d,e);a=Rg(c.type,null,d,b,b.mode,e);a.ref=b.ref;a.return=b;return b.child=a}f=a.child;if(0===(a.lanes&e)){var g=f.memoizedProps;c=c.compare;c=null!==c?c:Ie;if(c(g,d)&&a.ref===b.ref)return Zi(a,b,e)}b.flags|=1;a=Pg(f,d);a.ref=b.ref;a.return=b;return b.child=a}\nfunction bj(a,b,c,d,e){if(null!==a){var f=a.memoizedProps;if(Ie(f,d)&&a.ref===b.ref)if(dh=!1,b.pendingProps=d=f,0!==(a.lanes&e))0!==(a.flags&131072)&&(dh=!0);else return b.lanes=a.lanes,Zi(a,b,e)}return cj(a,b,c,d,e)}\nfunction dj(a,b,c){var d=b.pendingProps,e=d.children,f=null!==a?a.memoizedState:null;if(\"hidden\"===d.mode)if(0===(b.mode&1))b.memoizedState={baseLanes:0,cachePool:null,transitions:null},G(ej,fj),fj|=c;else{if(0===(c&1073741824))return a=null!==f?f.baseLanes|c:c,b.lanes=b.childLanes=1073741824,b.memoizedState={baseLanes:a,cachePool:null,transitions:null},b.updateQueue=null,G(ej,fj),fj|=a,null;b.memoizedState={baseLanes:0,cachePool:null,transitions:null};d=null!==f?f.baseLanes:c;G(ej,fj);fj|=d}else 
null!==\nf?(d=f.baseLanes|c,b.memoizedState=null):d=c,G(ej,fj),fj|=d;Xi(a,b,e,c);return b.child}function gj(a,b){var c=b.ref;if(null===a&&null!==c||null!==a&&a.ref!==c)b.flags|=512,b.flags|=2097152}function cj(a,b,c,d,e){var f=Zf(c)?Xf:H.current;f=Yf(b,f);ch(b,e);c=Nh(a,b,c,d,f,e);d=Sh();if(null!==a&&!dh)return b.updateQueue=a.updateQueue,b.flags&=-2053,a.lanes&=~e,Zi(a,b,e);I&&d&&vg(b);b.flags|=1;Xi(a,b,c,e);return b.child}\nfunction hj(a,b,c,d,e){if(Zf(c)){var f=!0;cg(b)}else f=!1;ch(b,e);if(null===b.stateNode)ij(a,b),Gi(b,c,d),Ii(b,c,d,e),d=!0;else if(null===a){var g=b.stateNode,h=b.memoizedProps;g.props=h;var k=g.context,l=c.contextType;\"object\"===typeof l&&null!==l?l=eh(l):(l=Zf(c)?Xf:H.current,l=Yf(b,l));var m=c.getDerivedStateFromProps,q=\"function\"===typeof m||\"function\"===typeof g.getSnapshotBeforeUpdate;q||\"function\"!==typeof g.UNSAFE_componentWillReceiveProps&&\"function\"!==typeof g.componentWillReceiveProps||\n(h!==d||k!==l)&&Hi(b,g,d,l);jh=!1;var r=b.memoizedState;g.state=r;qh(b,d,g,e);k=b.memoizedState;h!==d||r!==k||Wf.current||jh?(\"function\"===typeof m&&(Di(b,c,m,d),k=b.memoizedState),(h=jh||Fi(b,c,h,d,r,k,l))?(q||\"function\"!==typeof g.UNSAFE_componentWillMount&&\"function\"!==typeof g.componentWillMount||(\"function\"===typeof g.componentWillMount&&g.componentWillMount(),\"function\"===typeof g.UNSAFE_componentWillMount&&g.UNSAFE_componentWillMount()),\"function\"===typeof g.componentDidMount&&(b.flags|=4194308)):\n(\"function\"===typeof g.componentDidMount&&(b.flags|=4194308),b.memoizedProps=d,b.memoizedState=k),g.props=d,g.state=k,g.context=l,d=h):(\"function\"===typeof g.componentDidMount&&(b.flags|=4194308),d=!1)}else{g=b.stateNode;lh(a,b);h=b.memoizedProps;l=b.type===b.elementType?h:Ci(b.type,h);g.props=l;q=b.pendingProps;r=g.context;k=c.contextType;\"object\"===typeof k&&null!==k?k=eh(k):(k=Zf(c)?Xf:H.current,k=Yf(b,k));var y=c.getDerivedStateFromProps;(m=\"function\"===typeof y||\"function\"===typeof 
g.getSnapshotBeforeUpdate)||\n\"function\"!==typeof g.UNSAFE_componentWillReceiveProps&&\"function\"!==typeof g.componentWillReceiveProps||(h!==q||r!==k)&&Hi(b,g,d,k);jh=!1;r=b.memoizedState;g.state=r;qh(b,d,g,e);var n=b.memoizedState;h!==q||r!==n||Wf.current||jh?(\"function\"===typeof y&&(Di(b,c,y,d),n=b.memoizedState),(l=jh||Fi(b,c,l,d,r,n,k)||!1)?(m||\"function\"!==typeof g.UNSAFE_componentWillUpdate&&\"function\"!==typeof g.componentWillUpdate||(\"function\"===typeof g.componentWillUpdate&&g.componentWillUpdate(d,n,k),\"function\"===typeof g.UNSAFE_componentWillUpdate&&\ng.UNSAFE_componentWillUpdate(d,n,k)),\"function\"===typeof g.componentDidUpdate&&(b.flags|=4),\"function\"===typeof g.getSnapshotBeforeUpdate&&(b.flags|=1024)):(\"function\"!==typeof g.componentDidUpdate||h===a.memoizedProps&&r===a.memoizedState||(b.flags|=4),\"function\"!==typeof g.getSnapshotBeforeUpdate||h===a.memoizedProps&&r===a.memoizedState||(b.flags|=1024),b.memoizedProps=d,b.memoizedState=n),g.props=d,g.state=n,g.context=k,d=l):(\"function\"!==typeof g.componentDidUpdate||h===a.memoizedProps&&r===\na.memoizedState||(b.flags|=4),\"function\"!==typeof g.getSnapshotBeforeUpdate||h===a.memoizedProps&&r===a.memoizedState||(b.flags|=1024),d=!1)}return jj(a,b,c,d,f,e)}\nfunction jj(a,b,c,d,e,f){gj(a,b);var g=0!==(b.flags&128);if(!d&&!g)return e&&dg(b,c,!1),Zi(a,b,f);d=b.stateNode;Wi.current=b;var h=g&&\"function\"!==typeof c.getDerivedStateFromError?null:d.render();b.flags|=1;null!==a&&g?(b.child=Ug(b,a.child,null,f),b.child=Ug(b,null,h,f)):Xi(a,b,h,f);b.memoizedState=d.state;e&&dg(b,c,!0);return b.child}function kj(a){var b=a.stateNode;b.pendingContext?ag(a,b.pendingContext,b.pendingContext!==b.context):b.context&&ag(a,b.context,!1);yh(a,b.containerInfo)}\nfunction lj(a,b,c,d,e){Ig();Jg(e);b.flags|=256;Xi(a,b,c,d);return b.child}var mj={dehydrated:null,treeContext:null,retryLane:0};function nj(a){return{baseLanes:a,cachePool:null,transitions:null}}\nfunction oj(a,b,c){var 
d=b.pendingProps,e=L.current,f=!1,g=0!==(b.flags&128),h;(h=g)||(h=null!==a&&null===a.memoizedState?!1:0!==(e&2));if(h)f=!0,b.flags&=-129;else if(null===a||null!==a.memoizedState)e|=1;G(L,e&1);if(null===a){Eg(b);a=b.memoizedState;if(null!==a&&(a=a.dehydrated,null!==a))return 0===(b.mode&1)?b.lanes=1:\"$!\"===a.data?b.lanes=8:b.lanes=1073741824,null;g=d.children;a=d.fallback;return f?(d=b.mode,f=b.child,g={mode:\"hidden\",children:g},0===(d&1)&&null!==f?(f.childLanes=0,f.pendingProps=\ng):f=pj(g,d,0,null),a=Tg(a,d,c,null),f.return=b,a.return=b,f.sibling=a,b.child=f,b.child.memoizedState=nj(c),b.memoizedState=mj,a):qj(b,g)}e=a.memoizedState;if(null!==e&&(h=e.dehydrated,null!==h))return rj(a,b,g,d,h,e,c);if(f){f=d.fallback;g=b.mode;e=a.child;h=e.sibling;var k={mode:\"hidden\",children:d.children};0===(g&1)&&b.child!==e?(d=b.child,d.childLanes=0,d.pendingProps=k,b.deletions=null):(d=Pg(e,k),d.subtreeFlags=e.subtreeFlags&14680064);null!==h?f=Pg(h,f):(f=Tg(f,g,c,null),f.flags|=2);f.return=\nb;d.return=b;d.sibling=f;b.child=d;d=f;f=b.child;g=a.child.memoizedState;g=null===g?nj(c):{baseLanes:g.baseLanes|c,cachePool:null,transitions:g.transitions};f.memoizedState=g;f.childLanes=a.childLanes&~c;b.memoizedState=mj;return d}f=a.child;a=f.sibling;d=Pg(f,{mode:\"visible\",children:d.children});0===(b.mode&1)&&(d.lanes=c);d.return=b;d.sibling=null;null!==a&&(c=b.deletions,null===c?(b.deletions=[a],b.flags|=16):c.push(a));b.child=d;b.memoizedState=null;return d}\nfunction qj(a,b){b=pj({mode:\"visible\",children:b},a.mode,0,null);b.return=a;return a.child=b}function sj(a,b,c,d){null!==d&&Jg(d);Ug(b,a.child,null,c);a=qj(b,b.pendingProps.children);a.flags|=2;b.memoizedState=null;return a}\nfunction rj(a,b,c,d,e,f,g){if(c){if(b.flags&256)return b.flags&=-257,d=Ki(Error(p(422))),sj(a,b,g,d);if(null!==b.memoizedState)return 
b.child=a.child,b.flags|=128,null;f=d.fallback;e=b.mode;d=pj({mode:\"visible\",children:d.children},e,0,null);f=Tg(f,e,g,null);f.flags|=2;d.return=b;f.return=b;d.sibling=f;b.child=d;0!==(b.mode&1)&&Ug(b,a.child,null,g);b.child.memoizedState=nj(g);b.memoizedState=mj;return f}if(0===(b.mode&1))return sj(a,b,g,null);if(\"$!\"===e.data){d=e.nextSibling&&e.nextSibling.dataset;\nif(d)var h=d.dgst;d=h;f=Error(p(419));d=Ki(f,d,void 0);return sj(a,b,g,d)}h=0!==(g&a.childLanes);if(dh||h){d=Q;if(null!==d){switch(g&-g){case 4:e=2;break;case 16:e=8;break;case 64:case 128:case 256:case 512:case 1024:case 2048:case 4096:case 8192:case 16384:case 32768:case 65536:case 131072:case 262144:case 524288:case 1048576:case 2097152:case 4194304:case 8388608:case 16777216:case 33554432:case 67108864:e=32;break;case 536870912:e=268435456;break;default:e=0}e=0!==(e&(d.suspendedLanes|g))?0:e;\n0!==e&&e!==f.retryLane&&(f.retryLane=e,ih(a,e),gi(d,a,e,-1))}tj();d=Ki(Error(p(421)));return sj(a,b,g,d)}if(\"$?\"===e.data)return b.flags|=128,b.child=a.child,b=uj.bind(null,a),e._reactRetry=b,null;a=f.treeContext;yg=Lf(e.nextSibling);xg=b;I=!0;zg=null;null!==a&&(og[pg++]=rg,og[pg++]=sg,og[pg++]=qg,rg=a.id,sg=a.overflow,qg=b);b=qj(b,d.children);b.flags|=4096;return b}function vj(a,b,c){a.lanes|=b;var d=a.alternate;null!==d&&(d.lanes|=b);bh(a.return,b,c)}\nfunction wj(a,b,c,d,e){var f=a.memoizedState;null===f?a.memoizedState={isBackwards:b,rendering:null,renderingStartTime:0,last:d,tail:c,tailMode:e}:(f.isBackwards=b,f.rendering=null,f.renderingStartTime=0,f.last=d,f.tail=c,f.tailMode=e)}\nfunction xj(a,b,c){var d=b.pendingProps,e=d.revealOrder,f=d.tail;Xi(a,b,d.children,c);d=L.current;if(0!==(d&2))d=d&1|2,b.flags|=128;else{if(null!==a&&0!==(a.flags&128))a:for(a=b.child;null!==a;){if(13===a.tag)null!==a.memoizedState&&vj(a,c,b);else if(19===a.tag)vj(a,c,b);else if(null!==a.child){a.child.return=a;a=a.child;continue}if(a===b)break a;for(;null===a.sibling;){if(null===a.return||a.return===b)break 
a;a=a.return}a.sibling.return=a.return;a=a.sibling}d&=1}G(L,d);if(0===(b.mode&1))b.memoizedState=\nnull;else switch(e){case \"forwards\":c=b.child;for(e=null;null!==c;)a=c.alternate,null!==a&&null===Ch(a)&&(e=c),c=c.sibling;c=e;null===c?(e=b.child,b.child=null):(e=c.sibling,c.sibling=null);wj(b,!1,e,c,f);break;case \"backwards\":c=null;e=b.child;for(b.child=null;null!==e;){a=e.alternate;if(null!==a&&null===Ch(a)){b.child=e;break}a=e.sibling;e.sibling=c;c=e;e=a}wj(b,!0,c,null,f);break;case \"together\":wj(b,!1,null,null,void 0);break;default:b.memoizedState=null}return b.child}\nfunction ij(a,b){0===(b.mode&1)&&null!==a&&(a.alternate=null,b.alternate=null,b.flags|=2)}function Zi(a,b,c){null!==a&&(b.dependencies=a.dependencies);rh|=b.lanes;if(0===(c&b.childLanes))return null;if(null!==a&&b.child!==a.child)throw Error(p(153));if(null!==b.child){a=b.child;c=Pg(a,a.pendingProps);b.child=c;for(c.return=b;null!==a.sibling;)a=a.sibling,c=c.sibling=Pg(a,a.pendingProps),c.return=b;c.sibling=null}return b.child}\nfunction yj(a,b,c){switch(b.tag){case 3:kj(b);Ig();break;case 5:Ah(b);break;case 1:Zf(b.type)&&cg(b);break;case 4:yh(b,b.stateNode.containerInfo);break;case 10:var d=b.type._context,e=b.memoizedProps.value;G(Wg,d._currentValue);d._currentValue=e;break;case 13:d=b.memoizedState;if(null!==d){if(null!==d.dehydrated)return G(L,L.current&1),b.flags|=128,null;if(0!==(c&b.child.childLanes))return oj(a,b,c);G(L,L.current&1);a=Zi(a,b,c);return null!==a?a.sibling:null}G(L,L.current&1);break;case 19:d=0!==(c&\nb.childLanes);if(0!==(a.flags&128)){if(d)return xj(a,b,c);b.flags|=128}e=b.memoizedState;null!==e&&(e.rendering=null,e.tail=null,e.lastEffect=null);G(L,L.current);if(d)break;else return null;case 22:case 23:return b.lanes=0,dj(a,b,c)}return Zi(a,b,c)}var zj,Aj,Bj,Cj;\nzj=function(a,b){for(var c=b.child;null!==c;){if(5===c.tag||6===c.tag)a.appendChild(c.stateNode);else 
if(4!==c.tag&&null!==c.child){c.child.return=c;c=c.child;continue}if(c===b)break;for(;null===c.sibling;){if(null===c.return||c.return===b)return;c=c.return}c.sibling.return=c.return;c=c.sibling}};Aj=function(){};\nBj=function(a,b,c,d){var e=a.memoizedProps;if(e!==d){a=b.stateNode;xh(uh.current);var f=null;switch(c){case \"input\":e=Ya(a,e);d=Ya(a,d);f=[];break;case \"select\":e=A({},e,{value:void 0});d=A({},d,{value:void 0});f=[];break;case \"textarea\":e=gb(a,e);d=gb(a,d);f=[];break;default:\"function\"!==typeof e.onClick&&\"function\"===typeof d.onClick&&(a.onclick=Bf)}ub(c,d);var g;c=null;for(l in e)if(!d.hasOwnProperty(l)&&e.hasOwnProperty(l)&&null!=e[l])if(\"style\"===l){var h=e[l];for(g in h)h.hasOwnProperty(g)&&\n(c||(c={}),c[g]=\"\")}else\"dangerouslySetInnerHTML\"!==l&&\"children\"!==l&&\"suppressContentEditableWarning\"!==l&&\"suppressHydrationWarning\"!==l&&\"autoFocus\"!==l&&(ea.hasOwnProperty(l)?f||(f=[]):(f=f||[]).push(l,null));for(l in d){var k=d[l];h=null!=e?e[l]:void 0;if(d.hasOwnProperty(l)&&k!==h&&(null!=k||null!=h))if(\"style\"===l)if(h){for(g in h)!h.hasOwnProperty(g)||k&&k.hasOwnProperty(g)||(c||(c={}),c[g]=\"\");for(g in k)k.hasOwnProperty(g)&&h[g]!==k[g]&&(c||(c={}),c[g]=k[g])}else c||(f||(f=[]),f.push(l,\nc)),c=k;else\"dangerouslySetInnerHTML\"===l?(k=k?k.__html:void 0,h=h?h.__html:void 0,null!=k&&h!==k&&(f=f||[]).push(l,k)):\"children\"===l?\"string\"!==typeof k&&\"number\"!==typeof k||(f=f||[]).push(l,\"\"+k):\"suppressContentEditableWarning\"!==l&&\"suppressHydrationWarning\"!==l&&(ea.hasOwnProperty(l)?(null!=k&&\"onScroll\"===l&&D(\"scroll\",a),f||h===k||(f=[])):(f=f||[]).push(l,k))}c&&(f=f||[]).push(\"style\",c);var l=f;if(b.updateQueue=l)b.flags|=4}};Cj=function(a,b,c,d){c!==d&&(b.flags|=4)};\nfunction Dj(a,b){if(!I)switch(a.tailMode){case \"hidden\":b=a.tail;for(var c=null;null!==b;)null!==b.alternate&&(c=b),b=b.sibling;null===c?a.tail=null:c.sibling=null;break;case \"collapsed\":c=a.tail;for(var 
d=null;null!==c;)null!==c.alternate&&(d=c),c=c.sibling;null===d?b||null===a.tail?a.tail=null:a.tail.sibling=null:d.sibling=null}}\nfunction S(a){var b=null!==a.alternate&&a.alternate.child===a.child,c=0,d=0;if(b)for(var e=a.child;null!==e;)c|=e.lanes|e.childLanes,d|=e.subtreeFlags&14680064,d|=e.flags&14680064,e.return=a,e=e.sibling;else for(e=a.child;null!==e;)c|=e.lanes|e.childLanes,d|=e.subtreeFlags,d|=e.flags,e.return=a,e=e.sibling;a.subtreeFlags|=d;a.childLanes=c;return b}\nfunction Ej(a,b,c){var d=b.pendingProps;wg(b);switch(b.tag){case 2:case 16:case 15:case 0:case 11:case 7:case 8:case 12:case 9:case 14:return S(b),null;case 1:return Zf(b.type)&&$f(),S(b),null;case 3:d=b.stateNode;zh();E(Wf);E(H);Eh();d.pendingContext&&(d.context=d.pendingContext,d.pendingContext=null);if(null===a||null===a.child)Gg(b)?b.flags|=4:null===a||a.memoizedState.isDehydrated&&0===(b.flags&256)||(b.flags|=1024,null!==zg&&(Fj(zg),zg=null));Aj(a,b);S(b);return null;case 5:Bh(b);var e=xh(wh.current);\nc=b.type;if(null!==a&&null!=b.stateNode)Bj(a,b,c,d,e),a.ref!==b.ref&&(b.flags|=512,b.flags|=2097152);else{if(!d){if(null===b.stateNode)throw Error(p(166));S(b);return null}a=xh(uh.current);if(Gg(b)){d=b.stateNode;c=b.type;var f=b.memoizedProps;d[Of]=b;d[Pf]=f;a=0!==(b.mode&1);switch(c){case \"dialog\":D(\"cancel\",d);D(\"close\",d);break;case \"iframe\":case \"object\":case \"embed\":D(\"load\",d);break;case \"video\":case \"audio\":for(e=0;e<lf.length;e++)D(lf[e],d);break;case \"source\":D(\"error\",d);break;case \"img\":case \"image\":case \"link\":D(\"error\",\nd);D(\"load\",d);break;case \"details\":D(\"toggle\",d);break;case \"input\":Za(d,f);D(\"invalid\",d);break;case \"select\":d._wrapperState={wasMultiple:!!f.multiple};D(\"invalid\",d);break;case \"textarea\":hb(d,f),D(\"invalid\",d)}ub(c,f);e=null;for(var g in f)if(f.hasOwnProperty(g)){var h=f[g];\"children\"===g?\"string\"===typeof 
h?d.textContent!==h&&(!0!==f.suppressHydrationWarning&&Af(d.textContent,h,a),e=[\"children\",h]):\"number\"===typeof h&&d.textContent!==\"\"+h&&(!0!==f.suppressHydrationWarning&&Af(d.textContent,\nh,a),e=[\"children\",\"\"+h]):ea.hasOwnProperty(g)&&null!=h&&\"onScroll\"===g&&D(\"scroll\",d)}switch(c){case \"input\":Va(d);db(d,f,!0);break;case \"textarea\":Va(d);jb(d);break;case \"select\":case \"option\":break;default:\"function\"===typeof f.onClick&&(d.onclick=Bf)}d=e;b.updateQueue=d;null!==d&&(b.flags|=4)}else{g=9===e.nodeType?e:e.ownerDocument;\"http://www.w3.org/1999/xhtml\"===a&&(a=kb(c));\"http://www.w3.org/1999/xhtml\"===a?\"script\"===c?(a=g.createElement(\"div\"),a.innerHTML=\"<script>\\x3c/script>\",a=a.removeChild(a.firstChild)):\n\"string\"===typeof d.is?a=g.createElement(c,{is:d.is}):(a=g.createElement(c),\"select\"===c&&(g=a,d.multiple?g.multiple=!0:d.size&&(g.size=d.size))):a=g.createElementNS(a,c);a[Of]=b;a[Pf]=d;zj(a,b,!1,!1);b.stateNode=a;a:{g=vb(c,d);switch(c){case \"dialog\":D(\"cancel\",a);D(\"close\",a);e=d;break;case \"iframe\":case \"object\":case \"embed\":D(\"load\",a);e=d;break;case \"video\":case \"audio\":for(e=0;e<lf.length;e++)D(lf[e],a);e=d;break;case \"source\":D(\"error\",a);e=d;break;case \"img\":case \"image\":case \"link\":D(\"error\",\na);D(\"load\",a);e=d;break;case \"details\":D(\"toggle\",a);e=d;break;case \"input\":Za(a,d);e=Ya(a,d);D(\"invalid\",a);break;case \"option\":e=d;break;case \"select\":a._wrapperState={wasMultiple:!!d.multiple};e=A({},d,{value:void 0});D(\"invalid\",a);break;case \"textarea\":hb(a,d);e=gb(a,d);D(\"invalid\",a);break;default:e=d}ub(c,e);h=e;for(f in h)if(h.hasOwnProperty(f)){var k=h[f];\"style\"===f?sb(a,k):\"dangerouslySetInnerHTML\"===f?(k=k?k.__html:void 0,null!=k&&nb(a,k)):\"children\"===f?\"string\"===typeof k?(\"textarea\"!==\nc||\"\"!==k)&&ob(a,k):\"number\"===typeof 
k&&ob(a,\"\"+k):\"suppressContentEditableWarning\"!==f&&\"suppressHydrationWarning\"!==f&&\"autoFocus\"!==f&&(ea.hasOwnProperty(f)?null!=k&&\"onScroll\"===f&&D(\"scroll\",a):null!=k&&ta(a,f,k,g))}switch(c){case \"input\":Va(a);db(a,d,!1);break;case \"textarea\":Va(a);jb(a);break;case \"option\":null!=d.value&&a.setAttribute(\"value\",\"\"+Sa(d.value));break;case \"select\":a.multiple=!!d.multiple;f=d.value;null!=f?fb(a,!!d.multiple,f,!1):null!=d.defaultValue&&fb(a,!!d.multiple,d.defaultValue,\n!0);break;default:\"function\"===typeof e.onClick&&(a.onclick=Bf)}switch(c){case \"button\":case \"input\":case \"select\":case \"textarea\":d=!!d.autoFocus;break a;case \"img\":d=!0;break a;default:d=!1}}d&&(b.flags|=4)}null!==b.ref&&(b.flags|=512,b.flags|=2097152)}S(b);return null;case 6:if(a&&null!=b.stateNode)Cj(a,b,a.memoizedProps,d);else{if(\"string\"!==typeof d&&null===b.stateNode)throw Error(p(166));c=xh(wh.current);xh(uh.current);if(Gg(b)){d=b.stateNode;c=b.memoizedProps;d[Of]=b;if(f=d.nodeValue!==c)if(a=\nxg,null!==a)switch(a.tag){case 3:Af(d.nodeValue,c,0!==(a.mode&1));break;case 5:!0!==a.memoizedProps.suppressHydrationWarning&&Af(d.nodeValue,c,0!==(a.mode&1))}f&&(b.flags|=4)}else d=(9===c.nodeType?c:c.ownerDocument).createTextNode(d),d[Of]=b,b.stateNode=d}S(b);return null;case 13:E(L);d=b.memoizedState;if(null===a||null!==a.memoizedState&&null!==a.memoizedState.dehydrated){if(I&&null!==yg&&0!==(b.mode&1)&&0===(b.flags&128))Hg(),Ig(),b.flags|=98560,f=!1;else if(f=Gg(b),null!==d&&null!==d.dehydrated){if(null===\na){if(!f)throw Error(p(318));f=b.memoizedState;f=null!==f?f.dehydrated:null;if(!f)throw Error(p(317));f[Of]=b}else Ig(),0===(b.flags&128)&&(b.memoizedState=null),b.flags|=4;S(b);f=!1}else null!==zg&&(Fj(zg),zg=null),f=!0;if(!f)return b.flags&65536?b:null}if(0!==(b.flags&128))return 
b.lanes=c,b;d=null!==d;d!==(null!==a&&null!==a.memoizedState)&&d&&(b.child.flags|=8192,0!==(b.mode&1)&&(null===a||0!==(L.current&1)?0===T&&(T=3):tj()));null!==b.updateQueue&&(b.flags|=4);S(b);return null;case 4:return zh(),\nAj(a,b),null===a&&sf(b.stateNode.containerInfo),S(b),null;case 10:return ah(b.type._context),S(b),null;case 17:return Zf(b.type)&&$f(),S(b),null;case 19:E(L);f=b.memoizedState;if(null===f)return S(b),null;d=0!==(b.flags&128);g=f.rendering;if(null===g)if(d)Dj(f,!1);else{if(0!==T||null!==a&&0!==(a.flags&128))for(a=b.child;null!==a;){g=Ch(a);if(null!==g){b.flags|=128;Dj(f,!1);d=g.updateQueue;null!==d&&(b.updateQueue=d,b.flags|=4);b.subtreeFlags=0;d=c;for(c=b.child;null!==c;)f=c,a=d,f.flags&=14680066,\ng=f.alternate,null===g?(f.childLanes=0,f.lanes=a,f.child=null,f.subtreeFlags=0,f.memoizedProps=null,f.memoizedState=null,f.updateQueue=null,f.dependencies=null,f.stateNode=null):(f.childLanes=g.childLanes,f.lanes=g.lanes,f.child=g.child,f.subtreeFlags=0,f.deletions=null,f.memoizedProps=g.memoizedProps,f.memoizedState=g.memoizedState,f.updateQueue=g.updateQueue,f.type=g.type,a=g.dependencies,f.dependencies=null===a?null:{lanes:a.lanes,firstContext:a.firstContext}),c=c.sibling;G(L,L.current&1|2);return b.child}a=\na.sibling}null!==f.tail&&B()>Gj&&(b.flags|=128,d=!0,Dj(f,!1),b.lanes=4194304)}else{if(!d)if(a=Ch(g),null!==a){if(b.flags|=128,d=!0,c=a.updateQueue,null!==c&&(b.updateQueue=c,b.flags|=4),Dj(f,!0),null===f.tail&&\"hidden\"===f.tailMode&&!g.alternate&&!I)return S(b),null}else 2*B()-f.renderingStartTime>Gj&&1073741824!==c&&(b.flags|=128,d=!0,Dj(f,!1),b.lanes=4194304);f.isBackwards?(g.sibling=b.child,b.child=g):(c=f.last,null!==c?c.sibling=g:b.child=g,f.last=g)}if(null!==f.tail)return b=f.tail,f.rendering=\nb,f.tail=b.sibling,f.renderingStartTime=B(),b.sibling=null,c=L.current,G(L,d?c&1|2:c&1),b;S(b);return null;case 22:case 23:return 
Hj(),d=null!==b.memoizedState,null!==a&&null!==a.memoizedState!==d&&(b.flags|=8192),d&&0!==(b.mode&1)?0!==(fj&1073741824)&&(S(b),b.subtreeFlags&6&&(b.flags|=8192)):S(b),null;case 24:return null;case 25:return null}throw Error(p(156,b.tag));}\nfunction Ij(a,b){wg(b);switch(b.tag){case 1:return Zf(b.type)&&$f(),a=b.flags,a&65536?(b.flags=a&-65537|128,b):null;case 3:return zh(),E(Wf),E(H),Eh(),a=b.flags,0!==(a&65536)&&0===(a&128)?(b.flags=a&-65537|128,b):null;case 5:return Bh(b),null;case 13:E(L);a=b.memoizedState;if(null!==a&&null!==a.dehydrated){if(null===b.alternate)throw Error(p(340));Ig()}a=b.flags;return a&65536?(b.flags=a&-65537|128,b):null;case 19:return E(L),null;case 4:return zh(),null;case 10:return ah(b.type._context),null;case 22:case 23:return Hj(),\nnull;case 24:return null;default:return null}}var Jj=!1,U=!1,Kj=\"function\"===typeof WeakSet?WeakSet:Set,V=null;function Lj(a,b){var c=a.ref;if(null!==c)if(\"function\"===typeof c)try{c(null)}catch(d){W(a,b,d)}else c.current=null}function Mj(a,b,c){try{c()}catch(d){W(a,b,d)}}var Nj=!1;\nfunction Oj(a,b){Cf=dd;a=Me();if(Ne(a)){if(\"selectionStart\"in a)var c={start:a.selectionStart,end:a.selectionEnd};else a:{c=(c=a.ownerDocument)&&c.defaultView||window;var d=c.getSelection&&c.getSelection();if(d&&0!==d.rangeCount){c=d.anchorNode;var e=d.anchorOffset,f=d.focusNode;d=d.focusOffset;try{c.nodeType,f.nodeType}catch(F){c=null;break a}var g=0,h=-1,k=-1,l=0,m=0,q=a,r=null;b:for(;;){for(var y;;){q!==c||0!==e&&3!==q.nodeType||(h=g+e);q!==f||0!==d&&3!==q.nodeType||(k=g+d);3===q.nodeType&&(g+=\nq.nodeValue.length);if(null===(y=q.firstChild))break;r=q;q=y}for(;;){if(q===a)break b;r===c&&++l===e&&(h=g);r===f&&++m===d&&(k=g);if(null!==(y=q.nextSibling))break;q=r;r=q.parentNode}q=y}c=-1===h||-1===k?null:{start:h,end:k}}else c=null}c=c||{start:0,end:0}}else c=null;Df={focusedElem:a,selectionRange:c};dd=!1;for(V=b;null!==V;)if(b=V,a=b.child,0!==(b.subtreeFlags&1028)&&null!==a)a.return=b,V=a;else for(;null!==V;){b=V;try{var 
n=b.alternate;if(0!==(b.flags&1024))switch(b.tag){case 0:case 11:case 15:break;\ncase 1:if(null!==n){var t=n.memoizedProps,J=n.memoizedState,x=b.stateNode,w=x.getSnapshotBeforeUpdate(b.elementType===b.type?t:Ci(b.type,t),J);x.__reactInternalSnapshotBeforeUpdate=w}break;case 3:var u=b.stateNode.containerInfo;1===u.nodeType?u.textContent=\"\":9===u.nodeType&&u.documentElement&&u.removeChild(u.documentElement);break;case 5:case 6:case 4:case 17:break;default:throw Error(p(163));}}catch(F){W(b,b.return,F)}a=b.sibling;if(null!==a){a.return=b.return;V=a;break}V=b.return}n=Nj;Nj=!1;return n}\nfunction Pj(a,b,c){var d=b.updateQueue;d=null!==d?d.lastEffect:null;if(null!==d){var e=d=d.next;do{if((e.tag&a)===a){var f=e.destroy;e.destroy=void 0;void 0!==f&&Mj(b,c,f)}e=e.next}while(e!==d)}}function Qj(a,b){b=b.updateQueue;b=null!==b?b.lastEffect:null;if(null!==b){var c=b=b.next;do{if((c.tag&a)===a){var d=c.create;c.destroy=d()}c=c.next}while(c!==b)}}function Rj(a){var b=a.ref;if(null!==b){var c=a.stateNode;switch(a.tag){case 5:a=c;break;default:a=c}\"function\"===typeof b?b(a):b.current=a}}\nfunction Sj(a){var b=a.alternate;null!==b&&(a.alternate=null,Sj(b));a.child=null;a.deletions=null;a.sibling=null;5===a.tag&&(b=a.stateNode,null!==b&&(delete b[Of],delete b[Pf],delete b[of],delete b[Qf],delete b[Rf]));a.stateNode=null;a.return=null;a.dependencies=null;a.memoizedProps=null;a.memoizedState=null;a.pendingProps=null;a.stateNode=null;a.updateQueue=null}function Tj(a){return 5===a.tag||3===a.tag||4===a.tag}\nfunction Uj(a){a:for(;;){for(;null===a.sibling;){if(null===a.return||Tj(a.return))return null;a=a.return}a.sibling.return=a.return;for(a=a.sibling;5!==a.tag&&6!==a.tag&&18!==a.tag;){if(a.flags&2)continue a;if(null===a.child||4===a.tag)continue a;else a.child.return=a,a=a.child}if(!(a.flags&2))return a.stateNode}}\nfunction Vj(a,b,c){var 
d=a.tag;if(5===d||6===d)a=a.stateNode,b?8===c.nodeType?c.parentNode.insertBefore(a,b):c.insertBefore(a,b):(8===c.nodeType?(b=c.parentNode,b.insertBefore(a,c)):(b=c,b.appendChild(a)),c=c._reactRootContainer,null!==c&&void 0!==c||null!==b.onclick||(b.onclick=Bf));else if(4!==d&&(a=a.child,null!==a))for(Vj(a,b,c),a=a.sibling;null!==a;)Vj(a,b,c),a=a.sibling}\nfunction Wj(a,b,c){var d=a.tag;if(5===d||6===d)a=a.stateNode,b?c.insertBefore(a,b):c.appendChild(a);else if(4!==d&&(a=a.child,null!==a))for(Wj(a,b,c),a=a.sibling;null!==a;)Wj(a,b,c),a=a.sibling}var X=null,Xj=!1;function Yj(a,b,c){for(c=c.child;null!==c;)Zj(a,b,c),c=c.sibling}\nfunction Zj(a,b,c){if(lc&&\"function\"===typeof lc.onCommitFiberUnmount)try{lc.onCommitFiberUnmount(kc,c)}catch(h){}switch(c.tag){case 5:U||Lj(c,b);case 6:var d=X,e=Xj;X=null;Yj(a,b,c);X=d;Xj=e;null!==X&&(Xj?(a=X,c=c.stateNode,8===a.nodeType?a.parentNode.removeChild(c):a.removeChild(c)):X.removeChild(c.stateNode));break;case 18:null!==X&&(Xj?(a=X,c=c.stateNode,8===a.nodeType?Kf(a.parentNode,c):1===a.nodeType&&Kf(a,c),bd(a)):Kf(X,c.stateNode));break;case 4:d=X;e=Xj;X=c.stateNode.containerInfo;Xj=!0;\nYj(a,b,c);X=d;Xj=e;break;case 0:case 11:case 14:case 15:if(!U&&(d=c.updateQueue,null!==d&&(d=d.lastEffect,null!==d))){e=d=d.next;do{var f=e,g=f.destroy;f=f.tag;void 0!==g&&(0!==(f&2)?Mj(c,b,g):0!==(f&4)&&Mj(c,b,g));e=e.next}while(e!==d)}Yj(a,b,c);break;case 1:if(!U&&(Lj(c,b),d=c.stateNode,\"function\"===typeof d.componentWillUnmount))try{d.props=c.memoizedProps,d.state=c.memoizedState,d.componentWillUnmount()}catch(h){W(c,b,h)}Yj(a,b,c);break;case 21:Yj(a,b,c);break;case 22:c.mode&1?(U=(d=U)||null!==\nc.memoizedState,Yj(a,b,c),U=d):Yj(a,b,c);break;default:Yj(a,b,c)}}function ak(a){var b=a.updateQueue;if(null!==b){a.updateQueue=null;var c=a.stateNode;null===c&&(c=a.stateNode=new Kj);b.forEach(function(b){var d=bk.bind(null,a,b);c.has(b)||(c.add(b),b.then(d,d))})}}\nfunction ck(a,b){var c=b.deletions;if(null!==c)for(var d=0;d<c.length;d++){var 
e=c[d];try{var f=a,g=b,h=g;a:for(;null!==h;){switch(h.tag){case 5:X=h.stateNode;Xj=!1;break a;case 3:X=h.stateNode.containerInfo;Xj=!0;break a;case 4:X=h.stateNode.containerInfo;Xj=!0;break a}h=h.return}if(null===X)throw Error(p(160));Zj(f,g,e);X=null;Xj=!1;var k=e.alternate;null!==k&&(k.return=null);e.return=null}catch(l){W(e,b,l)}}if(b.subtreeFlags&12854)for(b=b.child;null!==b;)dk(b,a),b=b.sibling}\nfunction dk(a,b){var c=a.alternate,d=a.flags;switch(a.tag){case 0:case 11:case 14:case 15:ck(b,a);ek(a);if(d&4){try{Pj(3,a,a.return),Qj(3,a)}catch(t){W(a,a.return,t)}try{Pj(5,a,a.return)}catch(t){W(a,a.return,t)}}break;case 1:ck(b,a);ek(a);d&512&&null!==c&&Lj(c,c.return);break;case 5:ck(b,a);ek(a);d&512&&null!==c&&Lj(c,c.return);if(a.flags&32){var e=a.stateNode;try{ob(e,\"\")}catch(t){W(a,a.return,t)}}if(d&4&&(e=a.stateNode,null!=e)){var f=a.memoizedProps,g=null!==c?c.memoizedProps:f,h=a.type,k=a.updateQueue;\na.updateQueue=null;if(null!==k)try{\"input\"===h&&\"radio\"===f.type&&null!=f.name&&ab(e,f);vb(h,g);var l=vb(h,f);for(g=0;g<k.length;g+=2){var m=k[g],q=k[g+1];\"style\"===m?sb(e,q):\"dangerouslySetInnerHTML\"===m?nb(e,q):\"children\"===m?ob(e,q):ta(e,m,q,l)}switch(h){case \"input\":bb(e,f);break;case \"textarea\":ib(e,f);break;case \"select\":var r=e._wrapperState.wasMultiple;e._wrapperState.wasMultiple=!!f.multiple;var y=f.value;null!=y?fb(e,!!f.multiple,y,!1):r!==!!f.multiple&&(null!=f.defaultValue?fb(e,!!f.multiple,\nf.defaultValue,!0):fb(e,!!f.multiple,f.multiple?[]:\"\",!1))}e[Pf]=f}catch(t){W(a,a.return,t)}}break;case 6:ck(b,a);ek(a);if(d&4){if(null===a.stateNode)throw Error(p(162));e=a.stateNode;f=a.memoizedProps;try{e.nodeValue=f}catch(t){W(a,a.return,t)}}break;case 3:ck(b,a);ek(a);if(d&4&&null!==c&&c.memoizedState.isDehydrated)try{bd(b.containerInfo)}catch(t){W(a,a.return,t)}break;case 4:ck(b,a);ek(a);break;case 
13:ck(b,a);ek(a);e=a.child;e.flags&8192&&(f=null!==e.memoizedState,e.stateNode.isHidden=f,!f||\nnull!==e.alternate&&null!==e.alternate.memoizedState||(fk=B()));d&4&&ak(a);break;case 22:m=null!==c&&null!==c.memoizedState;a.mode&1?(U=(l=U)||m,ck(b,a),U=l):ck(b,a);ek(a);if(d&8192){l=null!==a.memoizedState;if((a.stateNode.isHidden=l)&&!m&&0!==(a.mode&1))for(V=a,m=a.child;null!==m;){for(q=V=m;null!==V;){r=V;y=r.child;switch(r.tag){case 0:case 11:case 14:case 15:Pj(4,r,r.return);break;case 1:Lj(r,r.return);var n=r.stateNode;if(\"function\"===typeof n.componentWillUnmount){d=r;c=r.return;try{b=d,n.props=\nb.memoizedProps,n.state=b.memoizedState,n.componentWillUnmount()}catch(t){W(d,c,t)}}break;case 5:Lj(r,r.return);break;case 22:if(null!==r.memoizedState){gk(q);continue}}null!==y?(y.return=r,V=y):gk(q)}m=m.sibling}a:for(m=null,q=a;;){if(5===q.tag){if(null===m){m=q;try{e=q.stateNode,l?(f=e.style,\"function\"===typeof f.setProperty?f.setProperty(\"display\",\"none\",\"important\"):f.display=\"none\"):(h=q.stateNode,k=q.memoizedProps.style,g=void 0!==k&&null!==k&&k.hasOwnProperty(\"display\")?k.display:null,h.style.display=\nrb(\"display\",g))}catch(t){W(a,a.return,t)}}}else if(6===q.tag){if(null===m)try{q.stateNode.nodeValue=l?\"\":q.memoizedProps}catch(t){W(a,a.return,t)}}else if((22!==q.tag&&23!==q.tag||null===q.memoizedState||q===a)&&null!==q.child){q.child.return=q;q=q.child;continue}if(q===a)break a;for(;null===q.sibling;){if(null===q.return||q.return===a)break a;m===q&&(m=null);q=q.return}m===q&&(m=null);q.sibling.return=q.return;q=q.sibling}}break;case 19:ck(b,a);ek(a);d&4&&ak(a);break;case 21:break;default:ck(b,\na),ek(a)}}function ek(a){var b=a.flags;if(b&2){try{a:{for(var c=a.return;null!==c;){if(Tj(c)){var d=c;break a}c=c.return}throw Error(p(160));}switch(d.tag){case 5:var e=d.stateNode;d.flags&32&&(ob(e,\"\"),d.flags&=-33);var f=Uj(a);Wj(a,f,e);break;case 3:case 4:var g=d.stateNode.containerInfo,h=Uj(a);Vj(a,h,g);break;default:throw 
Error(p(161));}}catch(k){W(a,a.return,k)}a.flags&=-3}b&4096&&(a.flags&=-4097)}function hk(a,b,c){V=a;ik(a,b,c)}\nfunction ik(a,b,c){for(var d=0!==(a.mode&1);null!==V;){var e=V,f=e.child;if(22===e.tag&&d){var g=null!==e.memoizedState||Jj;if(!g){var h=e.alternate,k=null!==h&&null!==h.memoizedState||U;h=Jj;var l=U;Jj=g;if((U=k)&&!l)for(V=e;null!==V;)g=V,k=g.child,22===g.tag&&null!==g.memoizedState?jk(e):null!==k?(k.return=g,V=k):jk(e);for(;null!==f;)V=f,ik(f,b,c),f=f.sibling;V=e;Jj=h;U=l}kk(a,b,c)}else 0!==(e.subtreeFlags&8772)&&null!==f?(f.return=e,V=f):kk(a,b,c)}}\nfunction kk(a){for(;null!==V;){var b=V;if(0!==(b.flags&8772)){var c=b.alternate;try{if(0!==(b.flags&8772))switch(b.tag){case 0:case 11:case 15:U||Qj(5,b);break;case 1:var d=b.stateNode;if(b.flags&4&&!U)if(null===c)d.componentDidMount();else{var e=b.elementType===b.type?c.memoizedProps:Ci(b.type,c.memoizedProps);d.componentDidUpdate(e,c.memoizedState,d.__reactInternalSnapshotBeforeUpdate)}var f=b.updateQueue;null!==f&&sh(b,f,d);break;case 3:var g=b.updateQueue;if(null!==g){c=null;if(null!==b.child)switch(b.child.tag){case 5:c=\nb.child.stateNode;break;case 1:c=b.child.stateNode}sh(b,g,c)}break;case 5:var h=b.stateNode;if(null===c&&b.flags&4){c=h;var k=b.memoizedProps;switch(b.type){case \"button\":case \"input\":case \"select\":case \"textarea\":k.autoFocus&&c.focus();break;case \"img\":k.src&&(c.src=k.src)}}break;case 6:break;case 4:break;case 12:break;case 13:if(null===b.memoizedState){var l=b.alternate;if(null!==l){var m=l.memoizedState;if(null!==m){var q=m.dehydrated;null!==q&&bd(q)}}}break;case 19:case 17:case 21:case 22:case 23:case 25:break;\ndefault:throw Error(p(163));}U||b.flags&512&&Rj(b)}catch(r){W(b,b.return,r)}}if(b===a){V=null;break}c=b.sibling;if(null!==c){c.return=b.return;V=c;break}V=b.return}}function gk(a){for(;null!==V;){var b=V;if(b===a){V=null;break}var c=b.sibling;if(null!==c){c.return=b.return;V=c;break}V=b.return}}\nfunction jk(a){for(;null!==V;){var b=V;try{switch(b.tag){case 
0:case 11:case 15:var c=b.return;try{Qj(4,b)}catch(k){W(b,c,k)}break;case 1:var d=b.stateNode;if(\"function\"===typeof d.componentDidMount){var e=b.return;try{d.componentDidMount()}catch(k){W(b,e,k)}}var f=b.return;try{Rj(b)}catch(k){W(b,f,k)}break;case 5:var g=b.return;try{Rj(b)}catch(k){W(b,g,k)}}}catch(k){W(b,b.return,k)}if(b===a){V=null;break}var h=b.sibling;if(null!==h){h.return=b.return;V=h;break}V=b.return}}\nvar lk=Math.ceil,mk=ua.ReactCurrentDispatcher,nk=ua.ReactCurrentOwner,ok=ua.ReactCurrentBatchConfig,K=0,Q=null,Y=null,Z=0,fj=0,ej=Uf(0),T=0,pk=null,rh=0,qk=0,rk=0,sk=null,tk=null,fk=0,Gj=Infinity,uk=null,Oi=!1,Pi=null,Ri=null,vk=!1,wk=null,xk=0,yk=0,zk=null,Ak=-1,Bk=0;function R(){return 0!==(K&6)?B():-1!==Ak?Ak:Ak=B()}\nfunction yi(a){if(0===(a.mode&1))return 1;if(0!==(K&2)&&0!==Z)return Z&-Z;if(null!==Kg.transition)return 0===Bk&&(Bk=yc()),Bk;a=C;if(0!==a)return a;a=window.event;a=void 0===a?16:jd(a.type);return a}function gi(a,b,c,d){if(50<yk)throw yk=0,zk=null,Error(p(185));Ac(a,c,d);if(0===(K&2)||a!==Q)a===Q&&(0===(K&2)&&(qk|=c),4===T&&Ck(a,Z)),Dk(a,d),1===c&&0===K&&0===(b.mode&1)&&(Gj=B()+500,fg&&jg())}\nfunction Dk(a,b){var c=a.callbackNode;wc(a,b);var d=uc(a,a===Q?Z:0);if(0===d)null!==c&&bc(c),a.callbackNode=null,a.callbackPriority=0;else if(b=d&-d,a.callbackPriority!==b){null!=c&&bc(c);if(1===b)0===a.tag?ig(Ek.bind(null,a)):hg(Ek.bind(null,a)),Jf(function(){0===(K&6)&&jg()}),c=null;else{switch(Dc(d)){case 1:c=fc;break;case 4:c=gc;break;case 16:c=hc;break;case 536870912:c=jc;break;default:c=hc}c=Fk(c,Gk.bind(null,a))}a.callbackPriority=b;a.callbackNode=c}}\nfunction Gk(a,b){Ak=-1;Bk=0;if(0!==(K&6))throw Error(p(327));var c=a.callbackNode;if(Hk()&&a.callbackNode!==c)return null;var d=uc(a,a===Q?Z:0);if(0===d)return null;if(0!==(d&30)||0!==(d&a.expiredLanes)||b)b=Ik(a,d);else{b=d;var e=K;K|=2;var f=Jk();if(Q!==a||Z!==b)uk=null,Gj=B()+500,Kk(a,b);do 
try{Lk();break}catch(h){Mk(a,h)}while(1);$g();mk.current=f;K=e;null!==Y?b=0:(Q=null,Z=0,b=T)}if(0!==b){2===b&&(e=xc(a),0!==e&&(d=e,b=Nk(a,e)));if(1===b)throw c=pk,Kk(a,0),Ck(a,d),Dk(a,B()),c;if(6===b)Ck(a,d);\nelse{e=a.current.alternate;if(0===(d&30)&&!Ok(e)&&(b=Ik(a,d),2===b&&(f=xc(a),0!==f&&(d=f,b=Nk(a,f))),1===b))throw c=pk,Kk(a,0),Ck(a,d),Dk(a,B()),c;a.finishedWork=e;a.finishedLanes=d;switch(b){case 0:case 1:throw Error(p(345));case 2:Pk(a,tk,uk);break;case 3:Ck(a,d);if((d&130023424)===d&&(b=fk+500-B(),10<b)){if(0!==uc(a,0))break;e=a.suspendedLanes;if((e&d)!==d){R();a.pingedLanes|=a.suspendedLanes&e;break}a.timeoutHandle=Ff(Pk.bind(null,a,tk,uk),b);break}Pk(a,tk,uk);break;case 4:Ck(a,d);if((d&4194240)===\nd)break;b=a.eventTimes;for(e=-1;0<d;){var g=31-oc(d);f=1<<g;g=b[g];g>e&&(e=g);d&=~f}d=e;d=B()-d;d=(120>d?120:480>d?480:1080>d?1080:1920>d?1920:3E3>d?3E3:4320>d?4320:1960*lk(d/1960))-d;if(10<d){a.timeoutHandle=Ff(Pk.bind(null,a,tk,uk),d);break}Pk(a,tk,uk);break;case 5:Pk(a,tk,uk);break;default:throw Error(p(329));}}}Dk(a,B());return a.callbackNode===c?Gk.bind(null,a):null}\nfunction Nk(a,b){var c=sk;a.current.memoizedState.isDehydrated&&(Kk(a,b).flags|=256);a=Ik(a,b);2!==a&&(b=tk,tk=c,null!==b&&Fj(b));return a}function Fj(a){null===tk?tk=a:tk.push.apply(tk,a)}\nfunction Ok(a){for(var b=a;;){if(b.flags&16384){var c=b.updateQueue;if(null!==c&&(c=c.stores,null!==c))for(var d=0;d<c.length;d++){var e=c[d],f=e.getSnapshot;e=e.value;try{if(!He(f(),e))return!1}catch(g){return!1}}}c=b.child;if(b.subtreeFlags&16384&&null!==c)c.return=b,b=c;else{if(b===a)break;for(;null===b.sibling;){if(null===b.return||b.return===a)return!0;b=b.return}b.sibling.return=b.return;b=b.sibling}}return!0}\nfunction Ck(a,b){b&=~rk;b&=~qk;a.suspendedLanes|=b;a.pingedLanes&=~b;for(a=a.expirationTimes;0<b;){var c=31-oc(b),d=1<<c;a[c]=-1;b&=~d}}function Ek(a){if(0!==(K&6))throw Error(p(327));Hk();var b=uc(a,0);if(0===(b&1))return Dk(a,B()),null;var c=Ik(a,b);if(0!==a.tag&&2===c){var 
d=xc(a);0!==d&&(b=d,c=Nk(a,d))}if(1===c)throw c=pk,Kk(a,0),Ck(a,b),Dk(a,B()),c;if(6===c)throw Error(p(345));a.finishedWork=a.current.alternate;a.finishedLanes=b;Pk(a,tk,uk);Dk(a,B());return null}\nfunction Qk(a,b){var c=K;K|=1;try{return a(b)}finally{K=c,0===K&&(Gj=B()+500,fg&&jg())}}function Rk(a){null!==wk&&0===wk.tag&&0===(K&6)&&Hk();var b=K;K|=1;var c=ok.transition,d=C;try{if(ok.transition=null,C=1,a)return a()}finally{C=d,ok.transition=c,K=b,0===(K&6)&&jg()}}function Hj(){fj=ej.current;E(ej)}\nfunction Kk(a,b){a.finishedWork=null;a.finishedLanes=0;var c=a.timeoutHandle;-1!==c&&(a.timeoutHandle=-1,Gf(c));if(null!==Y)for(c=Y.return;null!==c;){var d=c;wg(d);switch(d.tag){case 1:d=d.type.childContextTypes;null!==d&&void 0!==d&&$f();break;case 3:zh();E(Wf);E(H);Eh();break;case 5:Bh(d);break;case 4:zh();break;case 13:E(L);break;case 19:E(L);break;case 10:ah(d.type._context);break;case 22:case 23:Hj()}c=c.return}Q=a;Y=a=Pg(a.current,null);Z=fj=b;T=0;pk=null;rk=qk=rh=0;tk=sk=null;if(null!==fh){for(b=\n0;b<fh.length;b++)if(c=fh[b],d=c.interleaved,null!==d){c.interleaved=null;var e=d.next,f=c.pending;if(null!==f){var g=f.next;f.next=e;d.next=g}c.pending=d}fh=null}return a}\nfunction Mk(a,b){do{var c=Y;try{$g();Fh.current=Rh;if(Ih){for(var d=M.memoizedState;null!==d;){var e=d.queue;null!==e&&(e.pending=null);d=d.next}Ih=!1}Hh=0;O=N=M=null;Jh=!1;Kh=0;nk.current=null;if(null===c||null===c.return){T=1;pk=b;Y=null;break}a:{var f=a,g=c.return,h=c,k=b;b=Z;h.flags|=32768;if(null!==k&&\"object\"===typeof k&&\"function\"===typeof k.then){var l=k,m=h,q=m.tag;if(0===(m.mode&1)&&(0===q||11===q||15===q)){var r=m.alternate;r?(m.updateQueue=r.updateQueue,m.memoizedState=r.memoizedState,\nm.lanes=r.lanes):(m.updateQueue=null,m.memoizedState=null)}var y=Ui(g);if(null!==y){y.flags&=-257;Vi(y,g,h,f,b);y.mode&1&&Si(f,l,b);b=y;k=l;var n=b.updateQueue;if(null===n){var t=new Set;t.add(k);b.updateQueue=t}else n.add(k);break a}else{if(0===(b&1)){Si(f,l,b);tj();break a}k=Error(p(426))}}else 
if(I&&h.mode&1){var J=Ui(g);if(null!==J){0===(J.flags&65536)&&(J.flags|=256);Vi(J,g,h,f,b);Jg(Ji(k,h));break a}}f=k=Ji(k,h);4!==T&&(T=2);null===sk?sk=[f]:sk.push(f);f=g;do{switch(f.tag){case 3:f.flags|=65536;\nb&=-b;f.lanes|=b;var x=Ni(f,k,b);ph(f,x);break a;case 1:h=k;var w=f.type,u=f.stateNode;if(0===(f.flags&128)&&(\"function\"===typeof w.getDerivedStateFromError||null!==u&&\"function\"===typeof u.componentDidCatch&&(null===Ri||!Ri.has(u)))){f.flags|=65536;b&=-b;f.lanes|=b;var F=Qi(f,h,b);ph(f,F);break a}}f=f.return}while(null!==f)}Sk(c)}catch(na){b=na;Y===c&&null!==c&&(Y=c=c.return);continue}break}while(1)}function Jk(){var a=mk.current;mk.current=Rh;return null===a?Rh:a}\nfunction tj(){if(0===T||3===T||2===T)T=4;null===Q||0===(rh&268435455)&&0===(qk&268435455)||Ck(Q,Z)}function Ik(a,b){var c=K;K|=2;var d=Jk();if(Q!==a||Z!==b)uk=null,Kk(a,b);do try{Tk();break}catch(e){Mk(a,e)}while(1);$g();K=c;mk.current=d;if(null!==Y)throw Error(p(261));Q=null;Z=0;return T}function Tk(){for(;null!==Y;)Uk(Y)}function Lk(){for(;null!==Y&&!cc();)Uk(Y)}function Uk(a){var b=Vk(a.alternate,a,fj);a.memoizedProps=a.pendingProps;null===b?Sk(a):Y=b;nk.current=null}\nfunction Sk(a){var b=a;do{var c=b.alternate;a=b.return;if(0===(b.flags&32768)){if(c=Ej(c,b,fj),null!==c){Y=c;return}}else{c=Ij(c,b);if(null!==c){c.flags&=32767;Y=c;return}if(null!==a)a.flags|=32768,a.subtreeFlags=0,a.deletions=null;else{T=6;Y=null;return}}b=b.sibling;if(null!==b){Y=b;return}Y=b=a}while(null!==b);0===T&&(T=5)}function Pk(a,b,c){var d=C,e=ok.transition;try{ok.transition=null,C=1,Wk(a,b,c,d)}finally{ok.transition=e,C=d}return null}\nfunction Wk(a,b,c,d){do Hk();while(null!==wk);if(0!==(K&6))throw Error(p(327));c=a.finishedWork;var e=a.finishedLanes;if(null===c)return null;a.finishedWork=null;a.finishedLanes=0;if(c===a.current)throw Error(p(177));a.callbackNode=null;a.callbackPriority=0;var 
f=c.lanes|c.childLanes;Bc(a,f);a===Q&&(Y=Q=null,Z=0);0===(c.subtreeFlags&2064)&&0===(c.flags&2064)||vk||(vk=!0,Fk(hc,function(){Hk();return null}));f=0!==(c.flags&15990);if(0!==(c.subtreeFlags&15990)||f){f=ok.transition;ok.transition=null;\nvar g=C;C=1;var h=K;K|=4;nk.current=null;Oj(a,c);dk(c,a);Oe(Df);dd=!!Cf;Df=Cf=null;a.current=c;hk(c,a,e);dc();K=h;C=g;ok.transition=f}else a.current=c;vk&&(vk=!1,wk=a,xk=e);f=a.pendingLanes;0===f&&(Ri=null);mc(c.stateNode,d);Dk(a,B());if(null!==b)for(d=a.onRecoverableError,c=0;c<b.length;c++)e=b[c],d(e.value,{componentStack:e.stack,digest:e.digest});if(Oi)throw Oi=!1,a=Pi,Pi=null,a;0!==(xk&1)&&0!==a.tag&&Hk();f=a.pendingLanes;0!==(f&1)?a===zk?yk++:(yk=0,zk=a):yk=0;jg();return null}\nfunction Hk(){if(null!==wk){var a=Dc(xk),b=ok.transition,c=C;try{ok.transition=null;C=16>a?16:a;if(null===wk)var d=!1;else{a=wk;wk=null;xk=0;if(0!==(K&6))throw Error(p(331));var e=K;K|=4;for(V=a.current;null!==V;){var f=V,g=f.child;if(0!==(V.flags&16)){var h=f.deletions;if(null!==h){for(var k=0;k<h.length;k++){var l=h[k];for(V=l;null!==V;){var m=V;switch(m.tag){case 0:case 11:case 15:Pj(8,m,f)}var q=m.child;if(null!==q)q.return=m,V=q;else for(;null!==V;){m=V;var r=m.sibling,y=m.return;Sj(m);if(m===\nl){V=null;break}if(null!==r){r.return=y;V=r;break}V=y}}}var n=f.alternate;if(null!==n){var t=n.child;if(null!==t){n.child=null;do{var J=t.sibling;t.sibling=null;t=J}while(null!==t)}}V=f}}if(0!==(f.subtreeFlags&2064)&&null!==g)g.return=f,V=g;else b:for(;null!==V;){f=V;if(0!==(f.flags&2048))switch(f.tag){case 0:case 11:case 15:Pj(9,f,f.return)}var x=f.sibling;if(null!==x){x.return=f.return;V=x;break b}V=f.return}}var w=a.current;for(V=w;null!==V;){g=V;var u=g.child;if(0!==(g.subtreeFlags&2064)&&null!==\nu)u.return=g,V=u;else b:for(g=w;null!==V;){h=V;if(0!==(h.flags&2048))try{switch(h.tag){case 0:case 11:case 15:Qj(9,h)}}catch(na){W(h,h.return,na)}if(h===g){V=null;break b}var F=h.sibling;if(null!==F){F.return=h.return;V=F;break 
b}V=h.return}}K=e;jg();if(lc&&\"function\"===typeof lc.onPostCommitFiberRoot)try{lc.onPostCommitFiberRoot(kc,a)}catch(na){}d=!0}return d}finally{C=c,ok.transition=b}}return!1}function Xk(a,b,c){b=Ji(c,b);b=Ni(a,b,1);a=nh(a,b,1);b=R();null!==a&&(Ac(a,1,b),Dk(a,b))}\nfunction W(a,b,c){if(3===a.tag)Xk(a,a,c);else for(;null!==b;){if(3===b.tag){Xk(b,a,c);break}else if(1===b.tag){var d=b.stateNode;if(\"function\"===typeof b.type.getDerivedStateFromError||\"function\"===typeof d.componentDidCatch&&(null===Ri||!Ri.has(d))){a=Ji(c,a);a=Qi(b,a,1);b=nh(b,a,1);a=R();null!==b&&(Ac(b,1,a),Dk(b,a));break}}b=b.return}}\nfunction Ti(a,b,c){var d=a.pingCache;null!==d&&d.delete(b);b=R();a.pingedLanes|=a.suspendedLanes&c;Q===a&&(Z&c)===c&&(4===T||3===T&&(Z&130023424)===Z&&500>B()-fk?Kk(a,0):rk|=c);Dk(a,b)}function Yk(a,b){0===b&&(0===(a.mode&1)?b=1:(b=sc,sc<<=1,0===(sc&130023424)&&(sc=4194304)));var c=R();a=ih(a,b);null!==a&&(Ac(a,b,c),Dk(a,c))}function uj(a){var b=a.memoizedState,c=0;null!==b&&(c=b.retryLane);Yk(a,c)}\nfunction bk(a,b){var c=0;switch(a.tag){case 13:var d=a.stateNode;var e=a.memoizedState;null!==e&&(c=e.retryLane);break;case 19:d=a.stateNode;break;default:throw Error(p(314));}null!==d&&d.delete(b);Yk(a,c)}var Vk;\nVk=function(a,b,c){if(null!==a)if(a.memoizedProps!==b.pendingProps||Wf.current)dh=!0;else{if(0===(a.lanes&c)&&0===(b.flags&128))return dh=!1,yj(a,b,c);dh=0!==(a.flags&131072)?!0:!1}else dh=!1,I&&0!==(b.flags&1048576)&&ug(b,ng,b.index);b.lanes=0;switch(b.tag){case 2:var d=b.type;ij(a,b);a=b.pendingProps;var e=Yf(b,H.current);ch(b,c);e=Nh(null,b,d,a,e,c);var f=Sh();b.flags|=1;\"object\"===typeof e&&null!==e&&\"function\"===typeof e.render&&void 0===e.$$typeof?(b.tag=1,b.memoizedState=null,b.updateQueue=\nnull,Zf(d)?(f=!0,cg(b)):f=!1,b.memoizedState=null!==e.state&&void 0!==e.state?e.state:null,kh(b),e.updater=Ei,b.stateNode=e,e._reactInternals=b,Ii(b,d,a,c),b=jj(null,b,d,!0,f,c)):(b.tag=0,I&&f&&vg(b),Xi(null,b,e,c),b=b.child);return b;case 
16:d=b.elementType;a:{ij(a,b);a=b.pendingProps;e=d._init;d=e(d._payload);b.type=d;e=b.tag=Zk(d);a=Ci(d,a);switch(e){case 0:b=cj(null,b,d,a,c);break a;case 1:b=hj(null,b,d,a,c);break a;case 11:b=Yi(null,b,d,a,c);break a;case 14:b=$i(null,b,d,Ci(d.type,a),c);break a}throw Error(p(306,\nd,\"\"));}return b;case 0:return d=b.type,e=b.pendingProps,e=b.elementType===d?e:Ci(d,e),cj(a,b,d,e,c);case 1:return d=b.type,e=b.pendingProps,e=b.elementType===d?e:Ci(d,e),hj(a,b,d,e,c);case 3:a:{kj(b);if(null===a)throw Error(p(387));d=b.pendingProps;f=b.memoizedState;e=f.element;lh(a,b);qh(b,d,null,c);var g=b.memoizedState;d=g.element;if(f.isDehydrated)if(f={element:d,isDehydrated:!1,cache:g.cache,pendingSuspenseBoundaries:g.pendingSuspenseBoundaries,transitions:g.transitions},b.updateQueue.baseState=\nf,b.memoizedState=f,b.flags&256){e=Ji(Error(p(423)),b);b=lj(a,b,d,c,e);break a}else if(d!==e){e=Ji(Error(p(424)),b);b=lj(a,b,d,c,e);break a}else for(yg=Lf(b.stateNode.containerInfo.firstChild),xg=b,I=!0,zg=null,c=Vg(b,null,d,c),b.child=c;c;)c.flags=c.flags&-3|4096,c=c.sibling;else{Ig();if(d===e){b=Zi(a,b,c);break a}Xi(a,b,d,c)}b=b.child}return b;case 5:return Ah(b),null===a&&Eg(b),d=b.type,e=b.pendingProps,f=null!==a?a.memoizedProps:null,g=e.children,Ef(d,e)?g=null:null!==f&&Ef(d,f)&&(b.flags|=32),\ngj(a,b),Xi(a,b,g,c),b.child;case 6:return null===a&&Eg(b),null;case 13:return oj(a,b,c);case 4:return yh(b,b.stateNode.containerInfo),d=b.pendingProps,null===a?b.child=Ug(b,null,d,c):Xi(a,b,d,c),b.child;case 11:return d=b.type,e=b.pendingProps,e=b.elementType===d?e:Ci(d,e),Yi(a,b,d,e,c);case 7:return Xi(a,b,b.pendingProps,c),b.child;case 8:return Xi(a,b,b.pendingProps.children,c),b.child;case 12:return Xi(a,b,b.pendingProps.children,c),b.child;case 10:a:{d=b.type._context;e=b.pendingProps;f=b.memoizedProps;\ng=e.value;G(Wg,d._currentValue);d._currentValue=g;if(null!==f)if(He(f.value,g)){if(f.children===e.children&&!Wf.current){b=Zi(a,b,c);break a}}else 
for(f=b.child,null!==f&&(f.return=b);null!==f;){var h=f.dependencies;if(null!==h){g=f.child;for(var k=h.firstContext;null!==k;){if(k.context===d){if(1===f.tag){k=mh(-1,c&-c);k.tag=2;var l=f.updateQueue;if(null!==l){l=l.shared;var m=l.pending;null===m?k.next=k:(k.next=m.next,m.next=k);l.pending=k}}f.lanes|=c;k=f.alternate;null!==k&&(k.lanes|=c);bh(f.return,\nc,b);h.lanes|=c;break}k=k.next}}else if(10===f.tag)g=f.type===b.type?null:f.child;else if(18===f.tag){g=f.return;if(null===g)throw Error(p(341));g.lanes|=c;h=g.alternate;null!==h&&(h.lanes|=c);bh(g,c,b);g=f.sibling}else g=f.child;if(null!==g)g.return=f;else for(g=f;null!==g;){if(g===b){g=null;break}f=g.sibling;if(null!==f){f.return=g.return;g=f;break}g=g.return}f=g}Xi(a,b,e.children,c);b=b.child}return b;case 9:return e=b.type,d=b.pendingProps.children,ch(b,c),e=eh(e),d=d(e),b.flags|=1,Xi(a,b,d,c),\nb.child;case 14:return d=b.type,e=Ci(d,b.pendingProps),e=Ci(d.type,e),$i(a,b,d,e,c);case 15:return bj(a,b,b.type,b.pendingProps,c);case 17:return d=b.type,e=b.pendingProps,e=b.elementType===d?e:Ci(d,e),ij(a,b),b.tag=1,Zf(d)?(a=!0,cg(b)):a=!1,ch(b,c),Gi(b,d,e),Ii(b,d,e,c),jj(null,b,d,!0,a,c);case 19:return xj(a,b,c);case 22:return dj(a,b,c)}throw Error(p(156,b.tag));};function Fk(a,b){return ac(a,b)}\nfunction $k(a,b,c,d){this.tag=a;this.key=c;this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null;this.index=0;this.ref=null;this.pendingProps=b;this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null;this.mode=d;this.subtreeFlags=this.flags=0;this.deletions=null;this.childLanes=this.lanes=0;this.alternate=null}function Bg(a,b,c,d){return new $k(a,b,c,d)}function aj(a){a=a.prototype;return!(!a||!a.isReactComponent)}\nfunction Zk(a){if(\"function\"===typeof a)return aj(a)?1:0;if(void 0!==a&&null!==a){a=a.$$typeof;if(a===Da)return 11;if(a===Ga)return 14}return 2}\nfunction Pg(a,b){var 
c=a.alternate;null===c?(c=Bg(a.tag,b,a.key,a.mode),c.elementType=a.elementType,c.type=a.type,c.stateNode=a.stateNode,c.alternate=a,a.alternate=c):(c.pendingProps=b,c.type=a.type,c.flags=0,c.subtreeFlags=0,c.deletions=null);c.flags=a.flags&14680064;c.childLanes=a.childLanes;c.lanes=a.lanes;c.child=a.child;c.memoizedProps=a.memoizedProps;c.memoizedState=a.memoizedState;c.updateQueue=a.updateQueue;b=a.dependencies;c.dependencies=null===b?null:{lanes:b.lanes,firstContext:b.firstContext};\nc.sibling=a.sibling;c.index=a.index;c.ref=a.ref;return c}\nfunction Rg(a,b,c,d,e,f){var g=2;d=a;if(\"function\"===typeof a)aj(a)&&(g=1);else if(\"string\"===typeof a)g=5;else a:switch(a){case ya:return Tg(c.children,e,f,b);case za:g=8;e|=8;break;case Aa:return a=Bg(12,c,b,e|2),a.elementType=Aa,a.lanes=f,a;case Ea:return a=Bg(13,c,b,e),a.elementType=Ea,a.lanes=f,a;case Fa:return a=Bg(19,c,b,e),a.elementType=Fa,a.lanes=f,a;case Ia:return pj(c,e,f,b);default:if(\"object\"===typeof a&&null!==a)switch(a.$$typeof){case Ba:g=10;break a;case Ca:g=9;break a;case Da:g=11;\nbreak a;case Ga:g=14;break a;case Ha:g=16;d=null;break a}throw Error(p(130,null==a?a:typeof a,\"\"));}b=Bg(g,c,b,e);b.elementType=a;b.type=d;b.lanes=f;return b}function Tg(a,b,c,d){a=Bg(7,a,d,b);a.lanes=c;return a}function pj(a,b,c,d){a=Bg(22,a,d,b);a.elementType=Ia;a.lanes=c;a.stateNode={isHidden:!1};return a}function Qg(a,b,c){a=Bg(6,a,null,b);a.lanes=c;return a}\nfunction Sg(a,b,c){b=Bg(4,null!==a.children?a.children:[],a.key,b);b.lanes=c;b.stateNode={containerInfo:a.containerInfo,pendingChildren:null,implementation:a.implementation};return b}\nfunction 
al(a,b,c,d,e){this.tag=b;this.containerInfo=a;this.finishedWork=this.pingCache=this.current=this.pendingChildren=null;this.timeoutHandle=-1;this.callbackNode=this.pendingContext=this.context=null;this.callbackPriority=0;this.eventTimes=zc(0);this.expirationTimes=zc(-1);this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0;this.entanglements=zc(0);this.identifierPrefix=d;this.onRecoverableError=e;this.mutableSourceEagerHydrationData=\nnull}function bl(a,b,c,d,e,f,g,h,k){a=new al(a,b,c,h,k);1===b?(b=1,!0===f&&(b|=8)):b=0;f=Bg(3,null,null,b);a.current=f;f.stateNode=a;f.memoizedState={element:d,isDehydrated:c,cache:null,transitions:null,pendingSuspenseBoundaries:null};kh(f);return a}function cl(a,b,c){var d=3<arguments.length&&void 0!==arguments[3]?arguments[3]:null;return{$$typeof:wa,key:null==d?null:\"\"+d,children:a,containerInfo:b,implementation:c}}\nfunction dl(a){if(!a)return Vf;a=a._reactInternals;a:{if(Vb(a)!==a||1!==a.tag)throw Error(p(170));var b=a;do{switch(b.tag){case 3:b=b.stateNode.context;break a;case 1:if(Zf(b.type)){b=b.stateNode.__reactInternalMemoizedMergedChildContext;break a}}b=b.return}while(null!==b);throw Error(p(171));}if(1===a.tag){var c=a.type;if(Zf(c))return bg(a,c,b)}return b}\nfunction el(a,b,c,d,e,f,g,h,k){a=bl(c,d,!0,a,e,f,g,h,k);a.context=dl(null);c=a.current;d=R();e=yi(c);f=mh(d,e);f.callback=void 0!==b&&null!==b?b:null;nh(c,f,e);a.current.lanes=e;Ac(a,e,d);Dk(a,d);return a}function fl(a,b,c,d){var e=b.current,f=R(),g=yi(e);c=dl(c);null===b.context?b.context=c:b.pendingContext=c;b=mh(f,g);b.payload={element:a};d=void 0===d?null:d;null!==d&&(b.callback=d);a=nh(e,b,g);null!==a&&(gi(a,e,g,f),oh(a,e,g));return g}\nfunction gl(a){a=a.current;if(!a.child)return null;switch(a.child.tag){case 5:return a.child.stateNode;default:return a.child.stateNode}}function hl(a,b){a=a.memoizedState;if(null!==a&&null!==a.dehydrated){var 
c=a.retryLane;a.retryLane=0!==c&&c<b?c:b}}function il(a,b){hl(a,b);(a=a.alternate)&&hl(a,b)}function jl(){return null}var kl=\"function\"===typeof reportError?reportError:function(a){console.error(a)};function ll(a){this._internalRoot=a}\nml.prototype.render=ll.prototype.render=function(a){var b=this._internalRoot;if(null===b)throw Error(p(409));fl(a,b,null,null)};ml.prototype.unmount=ll.prototype.unmount=function(){var a=this._internalRoot;if(null!==a){this._internalRoot=null;var b=a.containerInfo;Rk(function(){fl(null,a,null,null)});b[uf]=null}};function ml(a){this._internalRoot=a}\nml.prototype.unstable_scheduleHydration=function(a){if(a){var b=Hc();a={blockedOn:null,target:a,priority:b};for(var c=0;c<Qc.length&&0!==b&&b<Qc[c].priority;c++);Qc.splice(c,0,a);0===c&&Vc(a)}};function nl(a){return!(!a||1!==a.nodeType&&9!==a.nodeType&&11!==a.nodeType)}function ol(a){return!(!a||1!==a.nodeType&&9!==a.nodeType&&11!==a.nodeType&&(8!==a.nodeType||\" react-mount-point-unstable \"!==a.nodeValue))}function pl(){}\nfunction ql(a,b,c,d,e){if(e){if(\"function\"===typeof d){var f=d;d=function(){var a=gl(g);f.call(a)}}var g=el(b,d,a,0,null,!1,!1,\"\",pl);a._reactRootContainer=g;a[uf]=g.current;sf(8===a.nodeType?a.parentNode:a);Rk();return g}for(;e=a.lastChild;)a.removeChild(e);if(\"function\"===typeof d){var h=d;d=function(){var a=gl(k);h.call(a)}}var k=bl(a,0,!1,null,null,!1,!1,\"\",pl);a._reactRootContainer=k;a[uf]=k.current;sf(8===a.nodeType?a.parentNode:a);Rk(function(){fl(b,k,c,d)});return k}\nfunction rl(a,b,c,d,e){var f=c._reactRootContainer;if(f){var g=f;if(\"function\"===typeof e){var h=e;e=function(){var a=gl(g);h.call(a)}}fl(b,g,a,e)}else g=ql(c,b,a,e,d);return gl(g)}Ec=function(a){switch(a.tag){case 3:var b=a.stateNode;if(b.current.memoizedState.isDehydrated){var c=tc(b.pendingLanes);0!==c&&(Cc(b,c|1),Dk(b,B()),0===(K&6)&&(Gj=B()+500,jg()))}break;case 13:Rk(function(){var b=ih(a,1);if(null!==b){var c=R();gi(b,a,1,c)}}),il(a,1)}};\nFc=function(a){if(13===a.tag){var 
b=ih(a,134217728);if(null!==b){var c=R();gi(b,a,134217728,c)}il(a,134217728)}};Gc=function(a){if(13===a.tag){var b=yi(a),c=ih(a,b);if(null!==c){var d=R();gi(c,a,b,d)}il(a,b)}};Hc=function(){return C};Ic=function(a,b){var c=C;try{return C=a,b()}finally{C=c}};\nyb=function(a,b,c){switch(b){case \"input\":bb(a,c);b=c.name;if(\"radio\"===c.type&&null!=b){for(c=a;c.parentNode;)c=c.parentNode;c=c.querySelectorAll(\"input[name=\"+JSON.stringify(\"\"+b)+'][type=\"radio\"]');for(b=0;b<c.length;b++){var d=c[b];if(d!==a&&d.form===a.form){var e=Db(d);if(!e)throw Error(p(90));Wa(d);bb(d,e)}}}break;case \"textarea\":ib(a,c);break;case \"select\":b=c.value,null!=b&&fb(a,!!c.multiple,b,!1)}};Gb=Qk;Hb=Rk;\nvar sl={usingClientEntryPoint:!1,Events:[Cb,ue,Db,Eb,Fb,Qk]},tl={findFiberByHostInstance:Wc,bundleType:0,version:\"18.3.1\",rendererPackageName:\"react-dom\"};\nvar ul={bundleType:tl.bundleType,version:tl.version,rendererPackageName:tl.rendererPackageName,rendererConfig:tl.rendererConfig,overrideHookState:null,overrideHookStateDeletePath:null,overrideHookStateRenamePath:null,overrideProps:null,overridePropsDeletePath:null,overridePropsRenamePath:null,setErrorHandler:null,setSuspenseHandler:null,scheduleUpdate:null,currentDispatcherRef:ua.ReactCurrentDispatcher,findHostInstanceByFiber:function(a){a=Zb(a);return null===a?null:a.stateNode},findFiberByHostInstance:tl.findFiberByHostInstance||\njl,findHostInstancesForRefresh:null,scheduleRefresh:null,scheduleRoot:null,setRefreshHandler:null,getCurrentFiber:null,reconcilerVersion:\"18.3.1-next-f1338f8080-20240426\"};if(\"undefined\"!==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__){var vl=__REACT_DEVTOOLS_GLOBAL_HOOK__;if(!vl.isDisabled&&vl.supportsFiber)try{kc=vl.inject(ul),lc=vl}catch(a){}}exports.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED=sl;\nexports.createPortal=function(a,b){var c=2<arguments.length&&void 0!==arguments[2]?arguments[2]:null;if(!nl(b))throw Error(p(200));return 
cl(a,b,null,c)};exports.createRoot=function(a,b){if(!nl(a))throw Error(p(299));var c=!1,d=\"\",e=kl;null!==b&&void 0!==b&&(!0===b.unstable_strictMode&&(c=!0),void 0!==b.identifierPrefix&&(d=b.identifierPrefix),void 0!==b.onRecoverableError&&(e=b.onRecoverableError));b=bl(a,1,!1,null,null,c,!1,d,e);a[uf]=b.current;sf(8===a.nodeType?a.parentNode:a);return new ll(b)};\nexports.findDOMNode=function(a){if(null==a)return null;if(1===a.nodeType)return a;var b=a._reactInternals;if(void 0===b){if(\"function\"===typeof a.render)throw Error(p(188));a=Object.keys(a).join(\",\");throw Error(p(268,a));}a=Zb(b);a=null===a?null:a.stateNode;return a};exports.flushSync=function(a){return Rk(a)};exports.hydrate=function(a,b,c){if(!ol(b))throw Error(p(200));return rl(null,a,b,!0,c)};\nexports.hydrateRoot=function(a,b,c){if(!nl(a))throw Error(p(405));var d=null!=c&&c.hydratedSources||null,e=!1,f=\"\",g=kl;null!==c&&void 0!==c&&(!0===c.unstable_strictMode&&(e=!0),void 0!==c.identifierPrefix&&(f=c.identifierPrefix),void 0!==c.onRecoverableError&&(g=c.onRecoverableError));b=el(b,null,a,1,null!=c?c:null,e,!1,f,g);a[uf]=b.current;sf(a);if(d)for(a=0;a<d.length;a++)c=d[a],e=c._getVersion,e=e(c._source),null==b.mutableSourceEagerHydrationData?b.mutableSourceEagerHydrationData=[c,e]:b.mutableSourceEagerHydrationData.push(c,\ne);return new ml(b)};exports.render=function(a,b,c){if(!ol(b))throw Error(p(200));return rl(null,a,b,!1,c)};exports.unmountComponentAtNode=function(a){if(!ol(a))throw Error(p(40));return a._reactRootContainer?(Rk(function(){rl(null,null,a,!1,function(){a._reactRootContainer=null;a[uf]=null})}),!0):!1};exports.unstable_batchedUpdates=Qk;\nexports.unstable_renderSubtreeIntoContainer=function(a,b,c,d){if(!ol(c))throw Error(p(200));if(null==a||void 0===a._reactInternals)throw Error(p(38));return rl(a,b,c,!1,d)};exports.version=\"18.3.1-next-f1338f8080-20240426\";\n","'use strict';\n\nvar m = require('react-dom');\nif (process.env.NODE_ENV === 'production') {\n 
exports.createRoot = m.createRoot;\n exports.hydrateRoot = m.hydrateRoot;\n} else {\n var i = m.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;\n exports.createRoot = function(c, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.createRoot(c, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n exports.hydrateRoot = function(c, h, o) {\n i.usingClientEntryPoint = true;\n try {\n return m.hydrateRoot(c, h, o);\n } finally {\n i.usingClientEntryPoint = false;\n }\n };\n}\n","'use strict';\n\nfunction checkDCE() {\n /* global __REACT_DEVTOOLS_GLOBAL_HOOK__ */\n if (\n typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ === 'undefined' ||\n typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE !== 'function'\n ) {\n return;\n }\n if (process.env.NODE_ENV !== 'production') {\n // This branch is unreachable because this function is only called\n // in production, but the condition is true only in development.\n // Therefore if the branch is still here, dead code elimination wasn't\n // properly applied.\n // Don't change the message. React DevTools relies on it. Also make sure\n // this message doesn't occur elsewhere in this function, or it will cause\n // a false positive.\n throw new Error('^_^');\n }\n try {\n // Verify that the code above has been dead code eliminated (DCE'd).\n __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(checkDCE);\n } catch (err) {\n // DevTools shouldn't crash React, no matter what.\n // We should still report in case we break this code.\n console.error(err);\n }\n}\n\nif (process.env.NODE_ENV === 'production') {\n // DCE check should happen before ReactDOM bundle executes so that\n // DevTools can report bad minification during injection.\n checkDCE();\n module.exports = require('./cjs/react-dom.production.min.js');\n} else {\n module.exports = require('./cjs/react-dom.development.js');\n}\n","/**\n * @license React\n * react-jsx-runtime.production.min.js\n *\n * Copyright (c) Facebook, Inc. 
and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';var f=require(\"react\"),k=Symbol.for(\"react.element\"),l=Symbol.for(\"react.fragment\"),m=Object.prototype.hasOwnProperty,n=f.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ReactCurrentOwner,p={key:!0,ref:!0,__self:!0,__source:!0};\nfunction q(c,a,g){var b,d={},e=null,h=null;void 0!==g&&(e=\"\"+g);void 0!==a.key&&(e=\"\"+a.key);void 0!==a.ref&&(h=a.ref);for(b in a)m.call(a,b)&&!p.hasOwnProperty(b)&&(d[b]=a[b]);if(c&&c.defaultProps)for(b in a=c.defaultProps,a)void 0===d[b]&&(d[b]=a[b]);return{$$typeof:k,type:c,key:e,ref:h,props:d,_owner:n.current}}exports.Fragment=l;exports.jsx=q;exports.jsxs=q;\n","/**\n * @license React\n * react.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';var l=Symbol.for(\"react.element\"),n=Symbol.for(\"react.portal\"),p=Symbol.for(\"react.fragment\"),q=Symbol.for(\"react.strict_mode\"),r=Symbol.for(\"react.profiler\"),t=Symbol.for(\"react.provider\"),u=Symbol.for(\"react.context\"),v=Symbol.for(\"react.forward_ref\"),w=Symbol.for(\"react.suspense\"),x=Symbol.for(\"react.memo\"),y=Symbol.for(\"react.lazy\"),z=Symbol.iterator;function A(a){if(null===a||\"object\"!==typeof a)return null;a=z&&a[z]||a[\"@@iterator\"];return\"function\"===typeof a?a:null}\nvar B={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},C=Object.assign,D={};function E(a,b,e){this.props=a;this.context=b;this.refs=D;this.updater=e||B}E.prototype.isReactComponent={};\nE.prototype.setState=function(a,b){if(\"object\"!==typeof a&&\"function\"!==typeof a&&null!=a)throw Error(\"setState(...): takes an object of state variables to update or a function 
which returns an object of state variables.\");this.updater.enqueueSetState(this,a,b,\"setState\")};E.prototype.forceUpdate=function(a){this.updater.enqueueForceUpdate(this,a,\"forceUpdate\")};function F(){}F.prototype=E.prototype;function G(a,b,e){this.props=a;this.context=b;this.refs=D;this.updater=e||B}var H=G.prototype=new F;\nH.constructor=G;C(H,E.prototype);H.isPureReactComponent=!0;var I=Array.isArray,J=Object.prototype.hasOwnProperty,K={current:null},L={key:!0,ref:!0,__self:!0,__source:!0};\nfunction M(a,b,e){var d,c={},k=null,h=null;if(null!=b)for(d in void 0!==b.ref&&(h=b.ref),void 0!==b.key&&(k=\"\"+b.key),b)J.call(b,d)&&!L.hasOwnProperty(d)&&(c[d]=b[d]);var g=arguments.length-2;if(1===g)c.children=e;else if(1<g){for(var f=Array(g),m=0;m<g;m++)f[m]=arguments[m+2];c.children=f}if(a&&a.defaultProps)for(d in g=a.defaultProps,g)void 0===c[d]&&(c[d]=g[d]);return{$$typeof:l,type:a,key:k,ref:h,props:c,_owner:K.current}}\nfunction N(a,b){return{$$typeof:l,type:a.type,key:b,ref:a.ref,props:a.props,_owner:a._owner}}function O(a){return\"object\"===typeof a&&null!==a&&a.$$typeof===l}function escape(a){var b={\"=\":\"=0\",\":\":\"=2\"};return\"$\"+a.replace(/[=:]/g,function(a){return b[a]})}var P=/\\/+/g;function Q(a,b){return\"object\"===typeof a&&null!==a&&null!=a.key?escape(\"\"+a.key):b.toString(36)}\nfunction R(a,b,e,d,c){var k=typeof a;if(\"undefined\"===k||\"boolean\"===k)a=null;var h=!1;if(null===a)h=!0;else switch(k){case \"string\":case \"number\":h=!0;break;case \"object\":switch(a.$$typeof){case l:case n:h=!0}}if(h)return h=a,c=c(h),a=\"\"===d?\".\"+Q(h,0):d,I(c)?(e=\"\",null!=a&&(e=a.replace(P,\"$&/\")+\"/\"),R(c,b,e,\"\",function(a){return a})):null!=c&&(O(c)&&(c=N(c,e+(!c.key||h&&h.key===c.key?\"\":(\"\"+c.key).replace(P,\"$&/\")+\"/\")+a)),b.push(c)),1;h=0;d=\"\"===d?\".\":d+\":\";if(I(a))for(var g=0;g<a.length;g++){k=\na[g];var f=d+Q(k,g);h+=R(k,b,e,f,c)}else if(f=A(a),\"function\"===typeof 
f)for(a=f.call(a),g=0;!(k=a.next()).done;)k=k.value,f=d+Q(k,g++),h+=R(k,b,e,f,c);else if(\"object\"===k)throw b=String(a),Error(\"Objects are not valid as a React child (found: \"+(\"[object Object]\"===b?\"object with keys {\"+Object.keys(a).join(\", \")+\"}\":b)+\"). If you meant to render a collection of children, use an array instead.\");return h}\nfunction S(a,b,e){if(null==a)return a;var d=[],c=0;R(a,d,\"\",\"\",function(a){return b.call(e,a,c++)});return d}function T(a){if(-1===a._status){var b=a._result;b=b();b.then(function(b){if(0===a._status||-1===a._status)a._status=1,a._result=b},function(b){if(0===a._status||-1===a._status)a._status=2,a._result=b});-1===a._status&&(a._status=0,a._result=b)}if(1===a._status)return a._result.default;throw a._result;}\nvar U={current:null},V={transition:null},W={ReactCurrentDispatcher:U,ReactCurrentBatchConfig:V,ReactCurrentOwner:K};function X(){throw Error(\"act(...) is not supported in production builds of React.\");}\nexports.Children={map:S,forEach:function(a,b,e){S(a,function(){b.apply(this,arguments)},e)},count:function(a){var b=0;S(a,function(){b++});return b},toArray:function(a){return S(a,function(a){return a})||[]},only:function(a){if(!O(a))throw Error(\"React.Children.only expected to receive a single React element child.\");return a}};exports.Component=E;exports.Fragment=p;exports.Profiler=r;exports.PureComponent=G;exports.StrictMode=q;exports.Suspense=w;\nexports.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED=W;exports.act=X;\nexports.cloneElement=function(a,b,e){if(null===a||void 0===a)throw Error(\"React.cloneElement(...): The argument must be a React element, but you passed \"+a+\".\");var d=C({},a.props),c=a.key,k=a.ref,h=a._owner;if(null!=b){void 0!==b.ref&&(k=b.ref,h=K.current);void 0!==b.key&&(c=\"\"+b.key);if(a.type&&a.type.defaultProps)var g=a.type.defaultProps;for(f in b)J.call(b,f)&&!L.hasOwnProperty(f)&&(d[f]=void 0===b[f]&&void 0!==g?g[f]:b[f])}var 
f=arguments.length-2;if(1===f)d.children=e;else if(1<f){g=Array(f);\nfor(var m=0;m<f;m++)g[m]=arguments[m+2];d.children=g}return{$$typeof:l,type:a.type,key:c,ref:k,props:d,_owner:h}};exports.createContext=function(a){a={$$typeof:u,_currentValue:a,_currentValue2:a,_threadCount:0,Provider:null,Consumer:null,_defaultValue:null,_globalName:null};a.Provider={$$typeof:t,_context:a};return a.Consumer=a};exports.createElement=M;exports.createFactory=function(a){var b=M.bind(null,a);b.type=a;return b};exports.createRef=function(){return{current:null}};\nexports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.isValidElement=O;exports.lazy=function(a){return{$$typeof:y,_payload:{_status:-1,_result:a},_init:T}};exports.memo=function(a,b){return{$$typeof:x,type:a,compare:void 0===b?null:b}};exports.startTransition=function(a){var b=V.transition;V.transition={};try{a()}finally{V.transition=b}};exports.unstable_act=X;exports.useCallback=function(a,b){return U.current.useCallback(a,b)};exports.useContext=function(a){return U.current.useContext(a)};\nexports.useDebugValue=function(){};exports.useDeferredValue=function(a){return U.current.useDeferredValue(a)};exports.useEffect=function(a,b){return U.current.useEffect(a,b)};exports.useId=function(){return U.current.useId()};exports.useImperativeHandle=function(a,b,e){return U.current.useImperativeHandle(a,b,e)};exports.useInsertionEffect=function(a,b){return U.current.useInsertionEffect(a,b)};exports.useLayoutEffect=function(a,b){return U.current.useLayoutEffect(a,b)};\nexports.useMemo=function(a,b){return U.current.useMemo(a,b)};exports.useReducer=function(a,b,e){return U.current.useReducer(a,b,e)};exports.useRef=function(a){return U.current.useRef(a)};exports.useState=function(a){return U.current.useState(a)};exports.useSyncExternalStore=function(a,b,e){return U.current.useSyncExternalStore(a,b,e)};exports.useTransition=function(){return U.current.useTransition()};exports.version=\"18.3.1\";\n","'use strict';\n\nif 
(process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/react.production.min.js');\n} else {\n module.exports = require('./cjs/react.development.js');\n}\n","'use strict';\n\nif (process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/react-jsx-runtime.production.min.js');\n} else {\n module.exports = require('./cjs/react-jsx-runtime.development.js');\n}\n","/**\n * @license React\n * scheduler.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';function f(a,b){var c=a.length;a.push(b);a:for(;0<c;){var d=c-1>>>1,e=a[d];if(0<g(e,b))a[d]=b,a[c]=e,c=d;else break a}}function h(a){return 0===a.length?null:a[0]}function k(a){if(0===a.length)return null;var b=a[0],c=a.pop();if(c!==b){a[0]=c;a:for(var d=0,e=a.length,w=e>>>1;d<w;){var m=2*(d+1)-1,C=a[m],n=m+1,x=a[n];if(0>g(C,c))n<e&&0>g(x,C)?(a[d]=x,a[n]=c,d=n):(a[d]=C,a[m]=c,d=m);else if(n<e&&0>g(x,c))a[d]=x,a[n]=c,d=n;else break a}}return b}\nfunction g(a,b){var c=a.sortIndex-b.sortIndex;return 0!==c?c:a.id-b.id}if(\"object\"===typeof performance&&\"function\"===typeof performance.now){var l=performance;exports.unstable_now=function(){return l.now()}}else{var p=Date,q=p.now();exports.unstable_now=function(){return p.now()-q}}var r=[],t=[],u=1,v=null,y=3,z=!1,A=!1,B=!1,D=\"function\"===typeof setTimeout?setTimeout:null,E=\"function\"===typeof clearTimeout?clearTimeout:null,F=\"undefined\"!==typeof setImmediate?setImmediate:null;\n\"undefined\"!==typeof navigator&&void 0!==navigator.scheduling&&void 0!==navigator.scheduling.isInputPending&&navigator.scheduling.isInputPending.bind(navigator.scheduling);function G(a){for(var b=h(t);null!==b;){if(null===b.callback)k(t);else if(b.startTime<=a)k(t),b.sortIndex=b.expirationTime,f(r,b);else break;b=h(t)}}function H(a){B=!1;G(a);if(!A)if(null!==h(r))A=!0,I(J);else{var 
b=h(t);null!==b&&K(H,b.startTime-a)}}\nfunction J(a,b){A=!1;B&&(B=!1,E(L),L=-1);z=!0;var c=y;try{G(b);for(v=h(r);null!==v&&(!(v.expirationTime>b)||a&&!M());){var d=v.callback;if(\"function\"===typeof d){v.callback=null;y=v.priorityLevel;var e=d(v.expirationTime<=b);b=exports.unstable_now();\"function\"===typeof e?v.callback=e:v===h(r)&&k(r);G(b)}else k(r);v=h(r)}if(null!==v)var w=!0;else{var m=h(t);null!==m&&K(H,m.startTime-b);w=!1}return w}finally{v=null,y=c,z=!1}}var N=!1,O=null,L=-1,P=5,Q=-1;\nfunction M(){return exports.unstable_now()-Q<P?!1:!0}function R(){if(null!==O){var a=exports.unstable_now();Q=a;var b=!0;try{b=O(!0,a)}finally{b?S():(N=!1,O=null)}}else N=!1}var S;if(\"function\"===typeof F)S=function(){F(R)};else if(\"undefined\"!==typeof MessageChannel){var T=new MessageChannel,U=T.port2;T.port1.onmessage=R;S=function(){U.postMessage(null)}}else S=function(){D(R,0)};function I(a){O=a;N||(N=!0,S())}function K(a,b){L=D(function(){a(exports.unstable_now())},b)}\nexports.unstable_IdlePriority=5;exports.unstable_ImmediatePriority=1;exports.unstable_LowPriority=4;exports.unstable_NormalPriority=3;exports.unstable_Profiling=null;exports.unstable_UserBlockingPriority=2;exports.unstable_cancelCallback=function(a){a.callback=null};exports.unstable_continueExecution=function(){A||z||(A=!0,I(J))};\nexports.unstable_forceFrameRate=function(a){0>a||125<a?console.error(\"forceFrameRate takes a positive int between 0 and 125, forcing frame rates higher than 125 fps is not supported\"):P=0<a?Math.floor(1E3/a):5};exports.unstable_getCurrentPriorityLevel=function(){return y};exports.unstable_getFirstCallbackNode=function(){return h(r)};exports.unstable_next=function(a){switch(y){case 1:case 2:case 3:var b=3;break;default:b=y}var c=y;y=b;try{return a()}finally{y=c}};exports.unstable_pauseExecution=function(){};\nexports.unstable_requestPaint=function(){};exports.unstable_runWithPriority=function(a,b){switch(a){case 1:case 2:case 3:case 4:case 5:break;default:a=3}var 
c=y;y=a;try{return b()}finally{y=c}};\nexports.unstable_scheduleCallback=function(a,b,c){var d=exports.unstable_now();\"object\"===typeof c&&null!==c?(c=c.delay,c=\"number\"===typeof c&&0<c?d+c:d):c=d;switch(a){case 1:var e=-1;break;case 2:e=250;break;case 5:e=1073741823;break;case 4:e=1E4;break;default:e=5E3}e=c+e;a={id:u++,callback:b,priorityLevel:a,startTime:c,expirationTime:e,sortIndex:-1};c>d?(a.sortIndex=c,f(t,a),null===h(r)&&a===h(t)&&(B?(E(L),L=-1):B=!0,K(H,c-d))):(a.sortIndex=e,f(r,a),A||z||(A=!0,I(J)));return a};\nexports.unstable_shouldYield=M;exports.unstable_wrapCallback=function(a){var b=y;return function(){var c=y;y=b;try{return a.apply(this,arguments)}finally{y=c}}};\n","'use strict';\n\nif (process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/scheduler.production.min.js');\n} else {\n module.exports = require('./cjs/scheduler.development.js');\n}\n","/* (ignored) */","import {\n __require,\n __toESM\n} from \"./chunk-6jf1natv.js\";\n\n// src/renderEntry.tsx\nimport { useContext, useEffect, useRef, useState } from \"react\";\nimport ReactDOM from \"react-dom/client\";\nimport {\n AbsoluteFill,\n getInputProps,\n getRemotionEnvironment,\n continueRender as globalContinueRender,\n delayRender as globalDelayRender,\n Internals,\n useDelayRender\n} from \"remotion\";\nimport { NoReactInternals } from \"remotion/no-react\";\nimport { jsx, jsxs } from \"react/jsx-runtime\";\nvar currentBundleMode = {\n type: \"index\"\n};\nvar setBundleMode = (state) => {\n currentBundleMode = state;\n};\nvar getBundleMode = () => {\n return currentBundleMode;\n};\nInternals.CSSUtils.injectCSS(Internals.CSSUtils.makeDefaultPreviewCSS(null, \"#1f2428\"));\nvar getCanSerializeDefaultProps = (object) => {\n try {\n const str = JSON.stringify(object);\n return str.length < 256 * 1024 * 1024 * 0.9;\n } catch (err) {\n if (err.message.includes(\"Invalid string length\")) {\n return false;\n }\n throw err;\n }\n};\nvar isInHeadlessBrowser = () => {\n return 
typeof window.remotion_puppeteerTimeout !== \"undefined\";\n};\nvar DelayedSpinner = () => {\n const [show, setShow] = useState(false);\n useEffect(() => {\n const timeout = setTimeout(() => {\n setShow(true);\n }, 2000);\n return () => {\n clearTimeout(timeout);\n };\n }, []);\n if (!show) {\n return null;\n }\n return /* @__PURE__ */ jsx(AbsoluteFill, {\n style: {\n justifyContent: \"center\",\n alignItems: \"center\",\n fontSize: 13,\n opacity: 0.6,\n color: \"white\",\n fontFamily: \"Helvetica, Arial, sans-serif\"\n },\n children: \"Loading Studio\"\n });\n};\nvar GetVideoComposition = ({ state }) => {\n const { compositions, currentCompositionMetadata, canvasContent } = useContext(Internals.CompositionManager);\n const { setCanvasContent } = useContext(Internals.CompositionSetters);\n const portalContainer = useRef(null);\n const { delayRender, continueRender } = useDelayRender();\n const [handle] = useState(() => delayRender(`Waiting for Composition \"${state.compositionName}\"`));\n useEffect(() => {\n return () => continueRender(handle);\n }, [handle, continueRender]);\n useEffect(() => {\n if (compositions.length === 0) {\n return;\n }\n const foundComposition = compositions.find((c) => c.id === state.compositionName);\n if (!foundComposition) {\n throw new Error(`Found no composition with the name ${state.compositionName}. The following compositions were found instead: ${compositions.map((c) => c.id).join(\", \")}. 
All compositions must have their ID calculated deterministically and must be mounted at the same time.`);\n }\n setCanvasContent({\n type: \"composition\",\n compositionId: foundComposition.id\n });\n }, [compositions, state, currentCompositionMetadata, setCanvasContent]);\n useEffect(() => {\n if (!canvasContent) {\n return;\n }\n const { current } = portalContainer;\n if (!current) {\n throw new Error(\"portal did not render\");\n }\n current.appendChild(Internals.portalNode());\n continueRender(handle);\n return () => {\n current.removeChild(Internals.portalNode());\n };\n }, [canvasContent, handle, continueRender]);\n if (!currentCompositionMetadata) {\n return null;\n }\n return /* @__PURE__ */ jsx(\"div\", {\n ref: portalContainer,\n id: \"remotion-canvas\",\n style: {\n width: currentCompositionMetadata.width,\n height: currentCompositionMetadata.height,\n display: \"flex\",\n backgroundColor: \"transparent\"\n }\n });\n};\nvar DEFAULT_ROOT_COMPONENT_TIMEOUT = 1e4;\nvar waitForRootHandle = globalDelayRender(\"Loading root component - See https://remotion.dev/docs/troubleshooting/loading-root-component if you experience a timeout\", {\n timeoutInMilliseconds: typeof window === \"undefined\" ? DEFAULT_ROOT_COMPONENT_TIMEOUT : window.remotion_puppeteerTimeout ?? DEFAULT_ROOT_COMPONENT_TIMEOUT\n});\nvar videoContainer = document.getElementById(\"video-container\");\nvar root = null;\nvar getRootForElement = () => {\n if (root) {\n return root;\n }\n root = ReactDOM.createRoot(videoContainer);\n return root;\n};\nvar renderToDOM = (content) => {\n if (!ReactDOM.createRoot) {\n if (NoReactInternals.ENABLE_V5_BREAKING_CHANGES) {\n throw new Error(\"Remotion 5.0 does only support React 18+. 
However, ReactDOM.createRoot() is undefined.\");\n }\n ReactDOM.render(content, videoContainer);\n return;\n }\n getRootForElement().render(content);\n};\nvar renderContent = (Root) => {\n const bundleMode = getBundleMode();\n if (bundleMode.type === \"composition\") {\n const markup = /* @__PURE__ */ jsx(Internals.CompositionManagerProvider, {\n initialCanvasContent: null,\n onlyRenderComposition: bundleMode.compositionName,\n currentCompositionMetadata: {\n props: NoReactInternals.deserializeJSONWithSpecialTypes(bundleMode.serializedResolvedPropsWithSchema),\n durationInFrames: bundleMode.compositionDurationInFrames,\n fps: bundleMode.compositionFps,\n height: bundleMode.compositionHeight,\n width: bundleMode.compositionWidth,\n defaultCodec: bundleMode.compositionDefaultCodec,\n defaultOutName: bundleMode.compositionDefaultOutName,\n defaultVideoImageFormat: bundleMode.compositionDefaultVideoImageFormat,\n defaultPixelFormat: bundleMode.compositionDefaultPixelFormat,\n defaultProResProfile: bundleMode.compositionDefaultProResProfile\n },\n initialCompositions: [],\n children: /* @__PURE__ */ jsx(Internals.RemotionRootContexts, {\n frameState: null,\n audioEnabled: window.remotion_audioEnabled,\n videoEnabled: window.remotion_videoEnabled,\n logLevel: window.remotion_logLevel,\n numberOfAudioTags: 0,\n audioLatencyHint: window.remotion_audioLatencyHint ?? 
\"interactive\",\n visualModeEnabled: false,\n children: /* @__PURE__ */ jsxs(Internals.RenderAssetManagerProvider, {\n collectAssets: null,\n children: [\n /* @__PURE__ */ jsx(Root, {}),\n /* @__PURE__ */ jsx(GetVideoComposition, {\n state: bundleMode\n })\n ]\n })\n })\n });\n renderToDOM(markup);\n }\n if (bundleMode.type === \"evaluation\") {\n const markup = /* @__PURE__ */ jsx(Internals.CompositionManagerProvider, {\n initialCanvasContent: null,\n onlyRenderComposition: null,\n currentCompositionMetadata: null,\n initialCompositions: [],\n children: /* @__PURE__ */ jsx(Internals.RemotionRootContexts, {\n frameState: null,\n audioEnabled: window.remotion_audioEnabled,\n videoEnabled: window.remotion_videoEnabled,\n logLevel: window.remotion_logLevel,\n numberOfAudioTags: 0,\n audioLatencyHint: window.remotion_audioLatencyHint ?? \"interactive\",\n visualModeEnabled: false,\n children: /* @__PURE__ */ jsx(Internals.RenderAssetManagerProvider, {\n collectAssets: null,\n children: /* @__PURE__ */ jsx(Root, {})\n })\n })\n });\n renderToDOM(markup);\n }\n if (bundleMode.type === \"index\") {\n if (isInHeadlessBrowser()) {\n return;\n }\n renderToDOM(/* @__PURE__ */ jsx(\"div\", {\n children: /* @__PURE__ */ jsx(DelayedSpinner, {})\n }));\n import(\"./chunk-x88z6n54.js\").then(({ StudioInternals }) => {\n window.remotion_isStudio = true;\n window.remotion_isReadOnlyStudio = true;\n window.remotion_inputProps = \"{}\";\n renderToDOM(/* @__PURE__ */ jsx(StudioInternals.Studio, {\n readOnly: true,\n rootComponent: Root,\n visualModeEnabled: false\n }));\n }).catch((err) => {\n renderToDOM(/* @__PURE__ */ jsxs(\"div\", {\n children: [\n \"Failed to load Remotion Studio: \",\n err.message\n ]\n }));\n });\n }\n};\nInternals.waitForRoot((Root) => {\n renderContent(Root);\n globalContinueRender(waitForRootHandle);\n});\nvar setBundleModeAndUpdate = (state) => {\n setBundleMode(state);\n const delay = globalDelayRender(\"Waiting for root component to load - See 
https://remotion.dev/docs/troubleshooting/loading-root-component if you experience a timeout\");\n Internals.waitForRoot((Root) => {\n renderContent(Root);\n requestAnimationFrame(() => {\n globalContinueRender(delay);\n });\n });\n};\nif (typeof window !== \"undefined\") {\n const getUnevaluatedComps = () => {\n if (!Internals.getRoot()) {\n throw new Error(\"registerRoot() was never called. 1. Make sure you specified the correct entrypoint for your bundle. 2. If your registerRoot() call is deferred, use the delayRender/continueRender pattern to tell Remotion to wait.\");\n }\n if (!Internals.compositionsRef.current) {\n throw new Error(\"Unexpectedly did not have a CompositionManager\");\n }\n const compositions = Internals.compositionsRef.current.getCompositions();\n const canSerializeDefaultProps = getCanSerializeDefaultProps(compositions);\n if (!canSerializeDefaultProps) {\n Internals.Log.warn({ logLevel: window.remotion_logLevel, tag: null }, \"defaultProps are too big to serialize - trying to find the problematic composition...\");\n Internals.Log.warn({ logLevel: window.remotion_logLevel, tag: null }, \"Serialization:\", compositions);\n for (const comp of compositions) {\n if (!getCanSerializeDefaultProps(comp)) {\n throw new Error(`defaultProps too big - could not serialize - the defaultProps of composition with ID ${comp.id} - the object that was passed to defaultProps was too big. Learn how to mitigate this error by visiting https://remotion.dev/docs/troubleshooting/serialize-defaultprops`);\n }\n }\n Internals.Log.warn({ logLevel: window.remotion_logLevel, tag: null }, \"Could not single out a problematic composition - The composition list as a whole is too big to serialize.\");\n throw new Error(\"defaultProps too big - Could not serialize - an object that was passed to defaultProps was too big. 
Learn how to mitigate this error by visiting https://remotion.dev/docs/troubleshooting/serialize-defaultprops\");\n }\n return compositions;\n };\n window.getStaticCompositions = () => {\n const compositions = getUnevaluatedComps();\n const inputProps = typeof window === \"undefined\" || getRemotionEnvironment().isPlayer ? {} : getInputProps() ?? {};\n return Promise.all(compositions.map(async (c) => {\n const handle = globalDelayRender(`Running calculateMetadata() for composition ${c.id}. If you didn't want to evaluate this composition, use \"selectComposition()\" instead of \"getCompositions()\"`);\n const originalProps = {\n ...c.defaultProps ?? {},\n ...inputProps ?? {}\n };\n const comp = Internals.resolveVideoConfig({\n calculateMetadata: c.calculateMetadata,\n compositionDurationInFrames: c.durationInFrames ?? null,\n compositionFps: c.fps ?? null,\n compositionHeight: c.height ?? null,\n compositionWidth: c.width ?? null,\n signal: new AbortController().signal,\n inputProps: originalProps,\n defaultProps: c.defaultProps ?? {},\n compositionId: c.id\n });\n const resolved = await Promise.resolve(comp);\n globalContinueRender(handle);\n const { props, defaultProps, ...data } = resolved;\n return {\n ...data,\n serializedResolvedPropsWithCustomSchema: NoReactInternals.serializeJSONWithSpecialTypes({\n data: props,\n indent: undefined,\n staticBase: null\n }).serializedString,\n serializedDefaultPropsWithCustomSchema: NoReactInternals.serializeJSONWithSpecialTypes({\n data: defaultProps,\n indent: undefined,\n staticBase: null\n }).serializedString\n };\n }));\n };\n window.remotion_getCompositionNames = () => {\n return getUnevaluatedComps().map((c) => c.id);\n };\n window.remotion_calculateComposition = async (compId) => {\n const compositions = getUnevaluatedComps();\n const selectedComp = compositions.find((c) => c.id === compId);\n if (!selectedComp) {\n throw new Error(`Could not find composition with ID ${compId}. 
Available compositions: ${compositions.map((c) => c.id).join(\", \")}`);\n }\n const abortController = new AbortController;\n const handle = globalDelayRender(`Running the calculateMetadata() function for composition ${compId}`);\n const inputProps = typeof window === \"undefined\" || getRemotionEnvironment().isPlayer ? {} : getInputProps() ?? {};\n const originalProps = {\n ...selectedComp.defaultProps ?? {},\n ...inputProps ?? {}\n };\n const prom = await Promise.resolve(Internals.resolveVideoConfig({\n calculateMetadata: selectedComp.calculateMetadata,\n compositionDurationInFrames: selectedComp.durationInFrames ?? null,\n compositionFps: selectedComp.fps ?? null,\n compositionHeight: selectedComp.height ?? null,\n compositionWidth: selectedComp.width ?? null,\n inputProps: originalProps,\n signal: abortController.signal,\n defaultProps: selectedComp.defaultProps ?? {},\n compositionId: selectedComp.id\n }));\n globalContinueRender(handle);\n const { props, defaultProps, ...data } = prom;\n return {\n ...data,\n serializedResolvedPropsWithCustomSchema: NoReactInternals.serializeJSONWithSpecialTypes({\n data: props,\n indent: undefined,\n staticBase: null\n }).serializedString,\n serializedDefaultPropsWithCustomSchema: NoReactInternals.serializeJSONWithSpecialTypes({\n data: defaultProps,\n indent: undefined,\n staticBase: null\n }).serializedString\n };\n };\n window.remotion_setBundleMode = setBundleModeAndUpdate;\n}\nexport {\n setBundleModeAndUpdate\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { Bitstream } from './bitstream.js';\nexport const aacFrequencyTable = [\n 96000, 88200, 64000, 48000, 44100, 32000,\n 24000, 22050, 16000, 12000, 11025, 8000, 7350,\n];\nexport const aacChannelMap = [-1, 1, 2, 3, 4, 5, 6, 8];\nexport const parseAacAudioSpecificConfig = (bytes) => {\n if (!bytes || bytes.byteLength < 2) {\n throw new TypeError('AAC description must be at least 2 bytes long.');\n }\n const bitstream = new Bitstream(bytes);\n let objectType = bitstream.readBits(5);\n if (objectType === 31) {\n objectType = 32 + bitstream.readBits(6);\n }\n const frequencyIndex = bitstream.readBits(4);\n let sampleRate = null;\n if (frequencyIndex === 15) {\n sampleRate = bitstream.readBits(24);\n }\n else {\n if (frequencyIndex < aacFrequencyTable.length) {\n sampleRate = aacFrequencyTable[frequencyIndex];\n }\n }\n const channelConfiguration = bitstream.readBits(4);\n let numberOfChannels = null;\n if (channelConfiguration >= 1 && channelConfiguration <= 7) {\n numberOfChannels = aacChannelMap[channelConfiguration];\n }\n return {\n objectType,\n frequencyIndex,\n sampleRate,\n channelConfiguration,\n numberOfChannels,\n };\n};\nexport const buildAacAudioSpecificConfig = (config) => {\n let frequencyIndex = aacFrequencyTable.indexOf(config.sampleRate);\n let customSampleRate = null;\n if (frequencyIndex === -1) {\n frequencyIndex = 15;\n customSampleRate = config.sampleRate;\n }\n const channelConfiguration = aacChannelMap.indexOf(config.numberOfChannels);\n if (channelConfiguration === -1) {\n throw new TypeError(`Unsupported number of channels: ${config.numberOfChannels}`);\n }\n let bitCount = 5 + 4 + 4;\n if (config.objectType >= 32) {\n bitCount += 6;\n }\n if (frequencyIndex === 15) {\n bitCount += 24;\n }\n const byteCount = Math.ceil(bitCount / 8);\n const bytes = new Uint8Array(byteCount);\n const bitstream = new Bitstream(bytes);\n 
if (config.objectType < 32) {\n bitstream.writeBits(5, config.objectType);\n }\n else {\n bitstream.writeBits(5, 31);\n bitstream.writeBits(6, config.objectType - 32);\n }\n bitstream.writeBits(4, frequencyIndex);\n if (frequencyIndex === 15) {\n bitstream.writeBits(24, customSampleRate);\n }\n bitstream.writeBits(4, channelConfiguration);\n return bytes;\n};\nexport const buildAdtsHeaderTemplate = (config) => {\n const header = new Uint8Array(7);\n const bitstream = new Bitstream(header);\n const { objectType, frequencyIndex, channelConfiguration } = config;\n const profile = objectType - 1;\n bitstream.writeBits(12, 0b1111_11111111); // Syncword\n bitstream.writeBits(1, 0); // MPEG Version\n bitstream.writeBits(2, 0); // Layer\n bitstream.writeBits(1, 1); // Protection absence\n bitstream.writeBits(2, profile); // Profile\n bitstream.writeBits(4, frequencyIndex); // MPEG-4 Sampling Frequency Index\n bitstream.writeBits(1, 0); // Private bit\n bitstream.writeBits(3, channelConfiguration); // MPEG-4 Channel Configuration\n bitstream.writeBits(1, 0); // Originality\n bitstream.writeBits(1, 0); // Home\n bitstream.writeBits(1, 0); // Copyright ID bit\n bitstream.writeBits(1, 0); // Copyright ID start\n bitstream.skipBits(13); // Frame length (to be filled per packet)\n bitstream.writeBits(11, 0x7ff); // Buffer fullness\n bitstream.writeBits(2, 0); // Number of AAC frames minus 1\n // Omit CRC check\n return { header, bitstream };\n};\nexport const writeAdtsFrameLength = (bitstream, frameLength) => {\n bitstream.pos = 30;\n bitstream.writeBits(13, frameLength);\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\n/** Sample rates indexed by fscod (Table 4.1) */\nexport const AC3_SAMPLE_RATES = [48000, 44100, 32000];\n/** E-AC-3 reduced sample rates for fscod2 per ATSC A/52:2018 */\nexport const EAC3_REDUCED_SAMPLE_RATES = [24000, 22050, 16000];\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport class Bitstream {\n constructor(bytes) {\n this.bytes = bytes;\n /** Current offset in bits. */\n this.pos = 0;\n }\n seekToByte(byteOffset) {\n this.pos = 8 * byteOffset;\n }\n readBit() {\n const byteIndex = Math.floor(this.pos / 8);\n const byte = this.bytes[byteIndex] ?? 0;\n const bitIndex = 0b111 - (this.pos & 0b111);\n const bit = (byte & (1 << bitIndex)) >> bitIndex;\n this.pos++;\n return bit;\n }\n readBits(n) {\n if (n === 1) {\n return this.readBit();\n }\n let result = 0;\n for (let i = 0; i < n; i++) {\n result <<= 1;\n result |= this.readBit();\n }\n return result;\n }\n writeBits(n, value) {\n const end = this.pos + n;\n for (let i = this.pos; i < end; i++) {\n const byteIndex = Math.floor(i / 8);\n let byte = this.bytes[byteIndex];\n const bitIndex = 0b111 - (i & 0b111);\n byte &= ~(1 << bitIndex);\n byte |= ((value & (1 << (end - i - 1))) >> (end - i - 1)) << bitIndex;\n this.bytes[byteIndex] = byte;\n }\n this.pos = end;\n }\n ;\n readAlignedByte() {\n if (this.pos % 8 !== 0) {\n throw new Error('Bitstream is not byte-aligned.');\n }\n const byteIndex = this.pos / 8;\n const byte = this.bytes[byteIndex] ?? 
0;\n this.pos += 8;\n return byte;\n }\n skipBits(n) {\n this.pos += n;\n }\n getBitsLeft() {\n return this.bytes.length * 8 - this.pos;\n }\n clone() {\n const clone = new Bitstream(this.bytes);\n clone.pos = this.pos;\n return clone;\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport const FRAME_HEADER_SIZE = 4;\nexport const SAMPLING_RATES = [44100, 48000, 32000];\nexport const KILOBIT_RATES = [\n // lowSamplingFrequency === 0\n -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // layer = 0\n -1, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, -1, // layer 1\n -1, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, -1, // layer = 2\n -1, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, -1, // layer = 3\n // lowSamplingFrequency === 1\n -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // layer = 0\n -1, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, -1, // layer = 1\n -1, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, -1, // layer = 2\n -1, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, -1, // layer = 3\n];\n/** 'Xing' */\nexport const XING = 0x58696e67;\n/** 'Info' */\nexport const INFO = 0x496e666f;\nexport const computeMp3FrameSize = (lowSamplingFrequency, layer, bitrate, sampleRate, padding) => {\n if (layer === 0) {\n return 0; // Not expected that this is hit\n }\n else if (layer === 1) {\n return Math.floor(144 * bitrate / (sampleRate << lowSamplingFrequency)) + padding;\n }\n else if (layer === 2) {\n return Math.floor(144 * bitrate / sampleRate) + padding;\n }\n else { // layer === 3\n return (Math.floor(12 * bitrate / sampleRate) + padding) * 4;\n }\n};\nexport const getXingOffset = (mpegVersionId, 
channel) => {\n return mpegVersionId === 3\n ? (channel === 3 ? 21 : 36)\n : (channel === 3 ? 13 : 21);\n};\nexport const readMp3FrameHeader = (word, remainingBytes) => {\n const firstByte = word >>> 24;\n const secondByte = (word >>> 16) & 0xff;\n const thirdByte = (word >>> 8) & 0xff;\n const fourthByte = word & 0xff;\n if (firstByte !== 0xff && secondByte !== 0xff && thirdByte !== 0xff && fourthByte !== 0xff) {\n return {\n header: null,\n bytesAdvanced: 4,\n };\n }\n if (firstByte !== 0xff) {\n return { header: null, bytesAdvanced: 1 };\n }\n if ((secondByte & 0xe0) !== 0xe0) {\n return { header: null, bytesAdvanced: 1 };\n }\n let lowSamplingFrequency = 0;\n let mpeg25 = 0;\n if (secondByte & (1 << 4)) {\n lowSamplingFrequency = (secondByte & (1 << 3)) ? 0 : 1;\n }\n else {\n lowSamplingFrequency = 1;\n mpeg25 = 1;\n }\n const mpegVersionId = (secondByte >> 3) & 0x3;\n const layer = (secondByte >> 1) & 0x3;\n const bitrateIndex = (thirdByte >> 4) & 0xf;\n const frequencyIndex = ((thirdByte >> 2) & 0x3) % 3;\n const padding = (thirdByte >> 1) & 0x1;\n const channel = (fourthByte >> 6) & 0x3;\n const modeExtension = (fourthByte >> 4) & 0x3;\n const copyright = (fourthByte >> 3) & 0x1;\n const original = (fourthByte >> 2) & 0x1;\n const emphasis = fourthByte & 0x3;\n const kilobitRate = KILOBIT_RATES[lowSamplingFrequency * 16 * 4 + layer * 16 + bitrateIndex];\n if (kilobitRate === -1) {\n return { header: null, bytesAdvanced: 1 };\n }\n const bitrate = kilobitRate * 1000;\n const sampleRate = SAMPLING_RATES[frequencyIndex] >> (lowSamplingFrequency + mpeg25);\n const frameLength = computeMp3FrameSize(lowSamplingFrequency, layer, bitrate, sampleRate, padding);\n if (remainingBytes !== null && remainingBytes < frameLength) {\n // The frame doesn't fit into the rest of the file\n return { header: null, bytesAdvanced: 1 };\n }\n let audioSamplesInFrame;\n if (mpegVersionId === 3) {\n audioSamplesInFrame = layer === 3 ? 
384 : 1152;\n }\n else {\n if (layer === 3) {\n audioSamplesInFrame = 384;\n }\n else if (layer === 2) {\n audioSamplesInFrame = 1152;\n }\n else {\n audioSamplesInFrame = 576;\n }\n }\n return {\n header: {\n totalSize: frameLength,\n mpegVersionId,\n layer,\n bitrate,\n frequencyIndex,\n sampleRate,\n channel,\n modeExtension,\n copyright,\n original,\n emphasis,\n audioSamplesInFrame,\n },\n bytesAdvanced: 1,\n };\n};\nexport const encodeSynchsafe = (unsynchsafed) => {\n let mask = 0x7f;\n let synchsafed = 0;\n let unsynchsafedRest = unsynchsafed;\n while ((mask ^ 0x7fffffff) !== 0) {\n synchsafed = unsynchsafedRest & ~mask;\n synchsafed <<= 1;\n synchsafed |= unsynchsafedRest & mask;\n mask = ((mask + 1) << 8) - 1;\n unsynchsafedRest = synchsafed;\n }\n return synchsafed;\n};\nexport const decodeSynchsafe = (synchsafed) => {\n let mask = 0x7f000000;\n let unsynchsafed = 0;\n while (mask !== 0) {\n unsynchsafed >>= 1;\n unsynchsafed |= synchsafed & mask;\n mask >>= 8;\n }\n return unsynchsafed;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { Bitstream } from '../../shared/bitstream.js';\nimport { readBytes } from '../reader.js';\nexport const MIN_ADTS_FRAME_HEADER_SIZE = 7;\nexport const MAX_ADTS_FRAME_HEADER_SIZE = 9;\nexport const readAdtsFrameHeader = (slice) => {\n // https://wiki.multimedia.cx/index.php/ADTS (last visited: 2025/08/17)\n const startPos = slice.filePos;\n const bytes = readBytes(slice, 9); // 9 with CRC, 7 without CRC\n const bitstream = new Bitstream(bytes);\n const syncword = bitstream.readBits(12);\n if (syncword !== 0b1111_11111111) {\n return null;\n }\n bitstream.skipBits(1); // MPEG version\n const layer = bitstream.readBits(2);\n if (layer !== 0) {\n return null;\n }\n const protectionAbsence = bitstream.readBits(1);\n const objectType = bitstream.readBits(2) + 1;\n const samplingFrequencyIndex = bitstream.readBits(4);\n if (samplingFrequencyIndex === 15) {\n return null;\n }\n bitstream.skipBits(1); // Private bit\n const channelConfiguration = bitstream.readBits(3);\n if (channelConfiguration === 0) {\n throw new Error('ADTS frames with channel configuration 0 are not supported.');\n }\n bitstream.skipBits(1); // Originality\n bitstream.skipBits(1); // Home\n bitstream.skipBits(1); // Copyright ID bit\n bitstream.skipBits(1); // Copyright ID start\n const frameLength = bitstream.readBits(13);\n bitstream.skipBits(11); // Buffer fullness\n const numberOfAacFrames = bitstream.readBits(2) + 1;\n if (numberOfAacFrames !== 1) {\n throw new Error('ADTS frames with more than one AAC frame are not supported.');\n }\n let crcCheck = null;\n if (protectionAbsence === 1) { // No CRC\n slice.filePos -= 2;\n }\n else { // CRC\n crcCheck = bitstream.readBits(16);\n }\n return {\n objectType,\n samplingFrequencyIndex,\n channelConfiguration,\n frameLength,\n numberOfAacFrames,\n crcCheck,\n startPos,\n };\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and 
contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { AVC_LEVEL_TABLE, VP9_LEVEL_TABLE } from './codec.js';\nimport { assert, assertNever, base64ToBytes, bytesToBase64, keyValueIterator, getUint24, last, readExpGolomb, readSignedExpGolomb, textDecoder, textEncoder, toDataView, toUint8Array, getChromiumVersion, isChromium, setUint24, } from './misc.js';\nimport { AC3_SAMPLE_RATES, EAC3_REDUCED_SAMPLE_RATES } from '../shared/ac3-misc.js';\nimport { Bitstream } from '../shared/bitstream.js';\n// References for AVC/HEVC code:\n// ISO 14496-15\n// Rec. ITU-T H.264\n// Rec. ITU-T H.265\n// https://stackoverflow.com/questions/24884827\nexport var AvcNalUnitType;\n(function (AvcNalUnitType) {\n AvcNalUnitType[AvcNalUnitType[\"NON_IDR_SLICE\"] = 1] = \"NON_IDR_SLICE\";\n AvcNalUnitType[AvcNalUnitType[\"SLICE_DPA\"] = 2] = \"SLICE_DPA\";\n AvcNalUnitType[AvcNalUnitType[\"SLICE_DPB\"] = 3] = \"SLICE_DPB\";\n AvcNalUnitType[AvcNalUnitType[\"SLICE_DPC\"] = 4] = \"SLICE_DPC\";\n AvcNalUnitType[AvcNalUnitType[\"IDR\"] = 5] = \"IDR\";\n AvcNalUnitType[AvcNalUnitType[\"SEI\"] = 6] = \"SEI\";\n AvcNalUnitType[AvcNalUnitType[\"SPS\"] = 7] = \"SPS\";\n AvcNalUnitType[AvcNalUnitType[\"PPS\"] = 8] = \"PPS\";\n AvcNalUnitType[AvcNalUnitType[\"AUD\"] = 9] = \"AUD\";\n AvcNalUnitType[AvcNalUnitType[\"SPS_EXT\"] = 13] = \"SPS_EXT\";\n})(AvcNalUnitType || (AvcNalUnitType = {}));\nexport var HevcNalUnitType;\n(function (HevcNalUnitType) {\n HevcNalUnitType[HevcNalUnitType[\"RASL_N\"] = 8] = \"RASL_N\";\n HevcNalUnitType[HevcNalUnitType[\"RASL_R\"] = 9] = \"RASL_R\";\n HevcNalUnitType[HevcNalUnitType[\"BLA_W_LP\"] = 16] = \"BLA_W_LP\";\n HevcNalUnitType[HevcNalUnitType[\"RSV_IRAP_VCL23\"] = 23] = \"RSV_IRAP_VCL23\";\n HevcNalUnitType[HevcNalUnitType[\"VPS_NUT\"] = 32] = \"VPS_NUT\";\n 
HevcNalUnitType[HevcNalUnitType[\"SPS_NUT\"] = 33] = \"SPS_NUT\";\n HevcNalUnitType[HevcNalUnitType[\"PPS_NUT\"] = 34] = \"PPS_NUT\";\n HevcNalUnitType[HevcNalUnitType[\"AUD_NUT\"] = 35] = \"AUD_NUT\";\n HevcNalUnitType[HevcNalUnitType[\"PREFIX_SEI_NUT\"] = 39] = \"PREFIX_SEI_NUT\";\n HevcNalUnitType[HevcNalUnitType[\"SUFFIX_SEI_NUT\"] = 40] = \"SUFFIX_SEI_NUT\";\n})(HevcNalUnitType || (HevcNalUnitType = {}));\nexport const iterateNalUnitsInAnnexB = function* (packetData) {\n let i = 0;\n let nalStart = -1;\n while (i < packetData.length - 2) {\n const zeroIndex = packetData.indexOf(0, i);\n if (zeroIndex === -1 || zeroIndex >= packetData.length - 2) {\n break;\n }\n i = zeroIndex;\n let startCodeLength = 0;\n // Check for 4-byte start code (0x00000001)\n if (i + 3 < packetData.length\n && packetData[i + 1] === 0\n && packetData[i + 2] === 0\n && packetData[i + 3] === 1) {\n startCodeLength = 4;\n }\n else if (packetData[i + 1] === 0 && packetData[i + 2] === 1) {\n // Check for 3-byte start code (0x000001)\n startCodeLength = 3;\n }\n if (startCodeLength === 0) {\n i++;\n continue;\n }\n // If we had a previous NAL unit, yield it\n if (nalStart !== -1 && i > nalStart) {\n yield {\n offset: nalStart,\n length: i - nalStart,\n };\n }\n nalStart = i + startCodeLength;\n i = nalStart;\n }\n // Yield the last NAL unit if there is one\n if (nalStart !== -1 && nalStart < packetData.length) {\n yield {\n offset: nalStart,\n length: packetData.length - nalStart,\n };\n }\n};\nexport const iterateNalUnitsInLengthPrefixed = function* (packetData, lengthSize) {\n let offset = 0;\n const dataView = new DataView(packetData.buffer, packetData.byteOffset, packetData.byteLength);\n while (offset + lengthSize <= packetData.length) {\n let nalUnitLength;\n if (lengthSize === 1) {\n nalUnitLength = dataView.getUint8(offset);\n }\n else if (lengthSize === 2) {\n nalUnitLength = dataView.getUint16(offset, false);\n }\n else if (lengthSize === 3) {\n nalUnitLength = getUint24(dataView, 
offset, false);\n }\n else {\n assert(lengthSize === 4);\n nalUnitLength = dataView.getUint32(offset, false);\n }\n offset += lengthSize;\n yield {\n offset,\n length: nalUnitLength,\n };\n offset += nalUnitLength;\n }\n};\nexport const iterateAvcNalUnits = (packetData, decoderConfig) => {\n if (decoderConfig.description) {\n const bytes = toUint8Array(decoderConfig.description);\n const lengthSizeMinusOne = bytes[4] & 0b11;\n const lengthSize = (lengthSizeMinusOne + 1);\n return iterateNalUnitsInLengthPrefixed(packetData, lengthSize);\n }\n else {\n return iterateNalUnitsInAnnexB(packetData);\n }\n};\nexport const extractNalUnitTypeForAvc = (byte) => {\n return byte & 0x1F;\n};\nconst removeEmulationPreventionBytes = (data) => {\n const result = [];\n const len = data.length;\n for (let i = 0; i < len; i++) {\n // Look for the 0x000003 pattern\n if (i + 2 < len && data[i] === 0x00 && data[i + 1] === 0x00 && data[i + 2] === 0x03) {\n result.push(0x00, 0x00); // Push the first two bytes\n i += 2; // Skip the 0x03 byte\n }\n else {\n result.push(data[i]);\n }\n }\n return new Uint8Array(result);\n};\nconst ANNEX_B_START_CODE = new Uint8Array([0, 0, 0, 1]);\nexport const concatNalUnitsInAnnexB = (nalUnits) => {\n const totalLength = nalUnits.reduce((a, b) => a + ANNEX_B_START_CODE.byteLength + b.byteLength, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const nalUnit of nalUnits) {\n result.set(ANNEX_B_START_CODE, offset);\n offset += ANNEX_B_START_CODE.byteLength;\n result.set(nalUnit, offset);\n offset += nalUnit.byteLength;\n }\n return result;\n};\nexport const concatNalUnitsInLengthPrefixed = (nalUnits, lengthSize) => {\n const totalLength = nalUnits.reduce((a, b) => a + lengthSize + b.byteLength, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const nalUnit of nalUnits) {\n const dataView = new DataView(result.buffer, result.byteOffset, result.byteLength);\n switch (lengthSize) {\n case 1:\n 
dataView.setUint8(offset, nalUnit.byteLength);\n break;\n case 2:\n dataView.setUint16(offset, nalUnit.byteLength, false);\n break;\n case 3:\n setUint24(dataView, offset, nalUnit.byteLength, false);\n break;\n case 4:\n dataView.setUint32(offset, nalUnit.byteLength, false);\n break;\n }\n offset += lengthSize;\n result.set(nalUnit, offset);\n offset += nalUnit.byteLength;\n }\n return result;\n};\nexport const concatAvcNalUnits = (nalUnits, decoderConfig) => {\n if (decoderConfig.description) {\n // Stream is length-prefixed. Let's extract the size of the length prefix from the decoder config\n const bytes = toUint8Array(decoderConfig.description);\n const lengthSizeMinusOne = bytes[4] & 0b11;\n const lengthSize = (lengthSizeMinusOne + 1);\n return concatNalUnitsInLengthPrefixed(nalUnits, lengthSize);\n }\n else {\n // Stream is in Annex B format\n return concatNalUnitsInAnnexB(nalUnits);\n }\n};\n/** Builds an AvcDecoderConfigurationRecord from an AVC packet in Annex B format. */\nexport const extractAvcDecoderConfigurationRecord = (packetData) => {\n try {\n const spsUnits = [];\n const ppsUnits = [];\n const spsExtUnits = [];\n for (const loc of iterateNalUnitsInAnnexB(packetData)) {\n const nalUnit = packetData.subarray(loc.offset, loc.offset + loc.length);\n const type = extractNalUnitTypeForAvc(nalUnit[0]);\n if (type === AvcNalUnitType.SPS) {\n spsUnits.push(nalUnit);\n }\n else if (type === AvcNalUnitType.PPS) {\n ppsUnits.push(nalUnit);\n }\n else if (type === AvcNalUnitType.SPS_EXT) {\n spsExtUnits.push(nalUnit);\n }\n }\n if (spsUnits.length === 0) {\n return null;\n }\n if (ppsUnits.length === 0) {\n return null;\n }\n // Let's get the first SPS for profile and level information\n const spsData = spsUnits[0];\n const spsInfo = parseAvcSps(spsData);\n assert(spsInfo !== null);\n const hasExtendedData = spsInfo.profileIdc === 100\n || spsInfo.profileIdc === 110\n || spsInfo.profileIdc === 122\n || spsInfo.profileIdc === 144;\n return {\n 
configurationVersion: 1,\n avcProfileIndication: spsInfo.profileIdc,\n profileCompatibility: spsInfo.constraintFlags,\n avcLevelIndication: spsInfo.levelIdc,\n lengthSizeMinusOne: 3, // Typically 4 bytes for length field\n sequenceParameterSets: spsUnits,\n pictureParameterSets: ppsUnits,\n chromaFormat: hasExtendedData ? spsInfo.chromaFormatIdc : null,\n bitDepthLumaMinus8: hasExtendedData ? spsInfo.bitDepthLumaMinus8 : null,\n bitDepthChromaMinus8: hasExtendedData ? spsInfo.bitDepthChromaMinus8 : null,\n sequenceParameterSetExt: hasExtendedData ? spsExtUnits : null,\n };\n }\n catch (error) {\n console.error('Error building AVC Decoder Configuration Record:', error);\n return null;\n }\n};\n/** Serializes an AvcDecoderConfigurationRecord into the format specified in Section 5.3.3.1 of ISO 14496-15. */\nexport const serializeAvcDecoderConfigurationRecord = (record) => {\n const bytes = [];\n // Write header\n bytes.push(record.configurationVersion);\n bytes.push(record.avcProfileIndication);\n bytes.push(record.profileCompatibility);\n bytes.push(record.avcLevelIndication);\n bytes.push(0xFC | (record.lengthSizeMinusOne & 0x03)); // Reserved bits (6) + lengthSizeMinusOne (2)\n // Reserved bits (3) + numOfSequenceParameterSets (5)\n bytes.push(0xE0 | (record.sequenceParameterSets.length & 0x1F));\n // Write SPS\n for (const sps of record.sequenceParameterSets) {\n const length = sps.byteLength;\n bytes.push(length >> 8); // High byte\n bytes.push(length & 0xFF); // Low byte\n for (let i = 0; i < length; i++) {\n bytes.push(sps[i]);\n }\n }\n bytes.push(record.pictureParameterSets.length);\n // Write PPS\n for (const pps of record.pictureParameterSets) {\n const length = pps.byteLength;\n bytes.push(length >> 8); // High byte\n bytes.push(length & 0xFF); // Low byte\n for (let i = 0; i < length; i++) {\n bytes.push(pps[i]);\n }\n }\n if (record.avcProfileIndication === 100\n || record.avcProfileIndication === 110\n || record.avcProfileIndication === 122\n || 
record.avcProfileIndication === 144) {\n assert(record.chromaFormat !== null);\n assert(record.bitDepthLumaMinus8 !== null);\n assert(record.bitDepthChromaMinus8 !== null);\n assert(record.sequenceParameterSetExt !== null);\n bytes.push(0xFC | (record.chromaFormat & 0x03)); // Reserved bits + chroma_format\n bytes.push(0xF8 | (record.bitDepthLumaMinus8 & 0x07)); // Reserved bits + bit_depth_luma_minus8\n bytes.push(0xF8 | (record.bitDepthChromaMinus8 & 0x07)); // Reserved bits + bit_depth_chroma_minus8\n bytes.push(record.sequenceParameterSetExt.length);\n // Write SPS Ext\n for (const spsExt of record.sequenceParameterSetExt) {\n const length = spsExt.byteLength;\n bytes.push(length >> 8); // High byte\n bytes.push(length & 0xFF); // Low byte\n for (let i = 0; i < length; i++) {\n bytes.push(spsExt[i]);\n }\n }\n }\n return new Uint8Array(bytes);\n};\n/** Deserializes an AvcDecoderConfigurationRecord from the format specified in Section 5.3.3.1 of ISO 14496-15. */\nexport const deserializeAvcDecoderConfigurationRecord = (data) => {\n try {\n const view = toDataView(data);\n let offset = 0;\n // Read header\n const configurationVersion = view.getUint8(offset++);\n const avcProfileIndication = view.getUint8(offset++);\n const profileCompatibility = view.getUint8(offset++);\n const avcLevelIndication = view.getUint8(offset++);\n const lengthSizeMinusOne = view.getUint8(offset++) & 0x03;\n const numOfSequenceParameterSets = view.getUint8(offset++) & 0x1F;\n // Read SPS\n const sequenceParameterSets = [];\n for (let i = 0; i < numOfSequenceParameterSets; i++) {\n const length = view.getUint16(offset, false);\n offset += 2;\n sequenceParameterSets.push(data.subarray(offset, offset + length));\n offset += length;\n }\n const numOfPictureParameterSets = view.getUint8(offset++);\n // Read PPS\n const pictureParameterSets = [];\n for (let i = 0; i < numOfPictureParameterSets; i++) {\n const length = view.getUint16(offset, false);\n offset += 2;\n 
pictureParameterSets.push(data.subarray(offset, offset + length));\n offset += length;\n }\n const record = {\n configurationVersion,\n avcProfileIndication,\n profileCompatibility,\n avcLevelIndication,\n lengthSizeMinusOne,\n sequenceParameterSets,\n pictureParameterSets,\n chromaFormat: null,\n bitDepthLumaMinus8: null,\n bitDepthChromaMinus8: null,\n sequenceParameterSetExt: null,\n };\n // Check if there are extended profile fields\n if ((avcProfileIndication === 100\n || avcProfileIndication === 110\n || avcProfileIndication === 122\n || avcProfileIndication === 144)\n && offset + 4 <= data.length) {\n const chromaFormat = view.getUint8(offset++) & 0x03;\n const bitDepthLumaMinus8 = view.getUint8(offset++) & 0x07;\n const bitDepthChromaMinus8 = view.getUint8(offset++) & 0x07;\n const numOfSequenceParameterSetExt = view.getUint8(offset++);\n record.chromaFormat = chromaFormat;\n record.bitDepthLumaMinus8 = bitDepthLumaMinus8;\n record.bitDepthChromaMinus8 = bitDepthChromaMinus8;\n // Read SPS Ext\n const sequenceParameterSetExt = [];\n for (let i = 0; i < numOfSequenceParameterSetExt; i++) {\n const length = view.getUint16(offset, false);\n offset += 2;\n sequenceParameterSetExt.push(data.subarray(offset, offset + length));\n offset += length;\n }\n record.sequenceParameterSetExt = sequenceParameterSetExt;\n }\n return record;\n }\n catch (error) {\n console.error('Error deserializing AVC Decoder Configuration Record:', error);\n return null;\n }\n};\nconst AVC_HEVC_ASPECT_RATIO_IDC_TABLE = {\n 1: { num: 1, den: 1 },\n 2: { num: 12, den: 11 },\n 3: { num: 10, den: 11 },\n 4: { num: 16, den: 11 },\n 5: { num: 40, den: 33 },\n 6: { num: 24, den: 11 },\n 7: { num: 20, den: 11 },\n 8: { num: 32, den: 11 },\n 9: { num: 80, den: 33 },\n 10: { num: 18, den: 11 },\n 11: { num: 15, den: 11 },\n 12: { num: 64, den: 33 },\n 13: { num: 160, den: 99 },\n 14: { num: 4, den: 3 },\n 15: { num: 3, den: 2 },\n 16: { num: 2, den: 1 },\n};\n/** Parses an AVC SPS (Sequence 
Parameter Set) to extract basic information. */\nexport const parseAvcSps = (sps) => {\n try {\n const bitstream = new Bitstream(removeEmulationPreventionBytes(sps));\n bitstream.skipBits(1); // forbidden_zero_bit\n bitstream.skipBits(2); // nal_ref_idc\n const nalUnitType = bitstream.readBits(5);\n if (nalUnitType !== 7) { // SPS NAL unit type is 7\n return null;\n }\n const profileIdc = bitstream.readAlignedByte();\n const constraintFlags = bitstream.readAlignedByte();\n const levelIdc = bitstream.readAlignedByte();\n readExpGolomb(bitstream); // seq_parameter_set_id\n // \"When chroma_format_idc is not present, it shall be inferred to be equal to 1 (4:2:0 chroma format).\"\n let chromaFormatIdc = 1;\n // \"When bit_depth_luma_minus8 is not present, it shall be inferred to be equal to 0.\"\"\n let bitDepthLumaMinus8 = 0;\n // \"When bit_depth_chroma_minus8 is not present, it shall be inferred to be equal to 0.\"\n let bitDepthChromaMinus8 = 0;\n // \"When separate_colour_plane_flag is not present, it shall be inferred to be equal to 0.\"\n let separateColourPlaneFlag = 0;\n // Handle high profile chroma_format_idc\n if (profileIdc === 100\n || profileIdc === 110\n || profileIdc === 122\n || profileIdc === 244\n || profileIdc === 44\n || profileIdc === 83\n || profileIdc === 86\n || profileIdc === 118\n || profileIdc === 128) {\n chromaFormatIdc = readExpGolomb(bitstream);\n if (chromaFormatIdc === 3) {\n separateColourPlaneFlag = bitstream.readBits(1);\n }\n bitDepthLumaMinus8 = readExpGolomb(bitstream);\n bitDepthChromaMinus8 = readExpGolomb(bitstream);\n bitstream.skipBits(1); // qpprime_y_zero_transform_bypass_flag\n const seqScalingMatrixPresentFlag = bitstream.readBits(1);\n if (seqScalingMatrixPresentFlag) {\n for (let i = 0; i < (chromaFormatIdc !== 3 ? 8 : 12); i++) {\n const seqScalingListPresentFlag = bitstream.readBits(1);\n if (seqScalingListPresentFlag) {\n const sizeOfScalingList = i < 6 ? 
16 : 64;\n let lastScale = 8;\n let nextScale = 8;\n for (let j = 0; j < sizeOfScalingList; j++) {\n if (nextScale !== 0) {\n const deltaScale = readSignedExpGolomb(bitstream);\n nextScale = (lastScale + deltaScale + 256) % 256;\n }\n lastScale = nextScale === 0 ? lastScale : nextScale;\n }\n }\n }\n }\n }\n readExpGolomb(bitstream); // log2_max_frame_num_minus4\n const picOrderCntType = readExpGolomb(bitstream);\n if (picOrderCntType === 0) {\n readExpGolomb(bitstream); // log2_max_pic_order_cnt_lsb_minus4\n }\n else if (picOrderCntType === 1) {\n bitstream.skipBits(1); // delta_pic_order_always_zero_flag\n readSignedExpGolomb(bitstream); // offset_for_non_ref_pic\n readSignedExpGolomb(bitstream); // offset_for_top_to_bottom_field\n const numRefFramesInPicOrderCntCycle = readExpGolomb(bitstream);\n for (let i = 0; i < numRefFramesInPicOrderCntCycle; i++) {\n readSignedExpGolomb(bitstream); // offset_for_ref_frame[i]\n }\n }\n readExpGolomb(bitstream); // max_num_ref_frames\n bitstream.skipBits(1); // gaps_in_frame_num_value_allowed_flag\n const picWidthInMbsMinus1 = readExpGolomb(bitstream);\n const picHeightInMapUnitsMinus1 = readExpGolomb(bitstream);\n const codedWidth = 16 * (picWidthInMbsMinus1 + 1);\n const codedHeight = 16 * (picHeightInMapUnitsMinus1 + 1);\n let displayWidth = codedWidth;\n let displayHeight = codedHeight;\n const frameMbsOnlyFlag = bitstream.readBits(1);\n if (!frameMbsOnlyFlag) {\n bitstream.skipBits(1); // mb_adaptive_frame_field_flag\n }\n bitstream.skipBits(1); // direct_8x8_inference_flag\n const frameCroppingFlag = bitstream.readBits(1);\n if (frameCroppingFlag) {\n const frameCropLeftOffset = readExpGolomb(bitstream);\n const frameCropRightOffset = readExpGolomb(bitstream);\n const frameCropTopOffset = readExpGolomb(bitstream);\n const frameCropBottomOffset = readExpGolomb(bitstream);\n let cropUnitX;\n let cropUnitY;\n const chromaArrayType = separateColourPlaneFlag === 0 ? 
chromaFormatIdc : 0;\n if (chromaArrayType === 0) {\n // \"If ChromaArrayType is equal to 0, CropUnitX and CropUnitY are derived as:\"\n cropUnitX = 1;\n cropUnitY = 2 - frameMbsOnlyFlag;\n }\n else {\n // \"Otherwise (ChromaArrayType is equal to 1, 2, or 3), CropUnitX and CropUnitY are derived as:\"\n const subWidthC = chromaFormatIdc === 3 ? 1 : 2;\n const subHeightC = chromaFormatIdc === 1 ? 2 : 1;\n cropUnitX = subWidthC;\n cropUnitY = subHeightC * (2 - frameMbsOnlyFlag);\n }\n displayWidth -= (cropUnitX * (frameCropLeftOffset + frameCropRightOffset));\n displayHeight -= (cropUnitY * (frameCropTopOffset + frameCropBottomOffset));\n }\n // 2 = unspecified\n let colourPrimaries = 2;\n let transferCharacteristics = 2;\n let matrixCoefficients = 2;\n let fullRangeFlag = 0;\n let pixelAspectRatio = { num: 1, den: 1 };\n let numReorderFrames = null;\n let maxDecFrameBuffering = null;\n const vuiParametersPresentFlag = bitstream.readBits(1);\n if (vuiParametersPresentFlag) {\n const aspectRatioInfoPresentFlag = bitstream.readBits(1);\n if (aspectRatioInfoPresentFlag) {\n const aspectRatioIdc = bitstream.readBits(8);\n if (aspectRatioIdc === 255) { // Extended_SAR\n pixelAspectRatio = {\n num: bitstream.readBits(16),\n den: bitstream.readBits(16),\n };\n }\n else {\n const aspectRatio = AVC_HEVC_ASPECT_RATIO_IDC_TABLE[aspectRatioIdc];\n if (aspectRatio) {\n pixelAspectRatio = aspectRatio;\n }\n }\n }\n const overscanInfoPresentFlag = bitstream.readBits(1);\n if (overscanInfoPresentFlag) {\n bitstream.skipBits(1); // overscan_appropriate_flag\n }\n const videoSignalTypePresentFlag = bitstream.readBits(1);\n if (videoSignalTypePresentFlag) {\n bitstream.skipBits(3); // video_format\n fullRangeFlag = bitstream.readBits(1);\n const colourDescriptionPresentFlag = bitstream.readBits(1);\n if (colourDescriptionPresentFlag) {\n colourPrimaries = bitstream.readBits(8);\n transferCharacteristics = bitstream.readBits(8);\n matrixCoefficients = bitstream.readBits(8);\n }\n }\n 
const chromaLocInfoPresentFlag = bitstream.readBits(1);\n if (chromaLocInfoPresentFlag) {\n readExpGolomb(bitstream); // chroma_sample_loc_type_top_field\n readExpGolomb(bitstream); // chroma_sample_loc_type_bottom_field\n }\n const timingInfoPresentFlag = bitstream.readBits(1);\n if (timingInfoPresentFlag) {\n bitstream.skipBits(32); // num_units_in_tick\n bitstream.skipBits(32); // time_scale\n bitstream.skipBits(1); // fixed_frame_rate_flag\n }\n const nalHrdParametersPresentFlag = bitstream.readBits(1);\n if (nalHrdParametersPresentFlag) {\n skipAvcHrdParameters(bitstream);\n }\n const vclHrdParametersPresentFlag = bitstream.readBits(1);\n if (vclHrdParametersPresentFlag) {\n skipAvcHrdParameters(bitstream);\n }\n if (nalHrdParametersPresentFlag || vclHrdParametersPresentFlag) {\n bitstream.skipBits(1); // low_delay_hrd_flag\n }\n bitstream.skipBits(1); // pic_struct_present_flag\n const bitstreamRestrictionFlag = bitstream.readBits(1);\n if (bitstreamRestrictionFlag) {\n bitstream.skipBits(1); // motion_vectors_over_pic_boundaries_flag\n readExpGolomb(bitstream); // max_bytes_per_pic_denom\n readExpGolomb(bitstream); // max_bits_per_mb_denom\n readExpGolomb(bitstream); // log2_max_mv_length_horizontal\n readExpGolomb(bitstream); // log2_max_mv_length_vertical\n numReorderFrames = readExpGolomb(bitstream);\n maxDecFrameBuffering = readExpGolomb(bitstream);\n }\n }\n if (numReorderFrames === null) {\n assert(maxDecFrameBuffering === null);\n const constraintSet3Flag = constraintFlags & 0b00010000;\n if ((profileIdc === 44 || profileIdc === 86 || profileIdc === 100\n || profileIdc === 110 || profileIdc === 122 || profileIdc === 244) && constraintSet3Flag) {\n // \"If profile_idc is equal to 44, 86, 100, 110, 122, or 244 and constraint_set3_flag is equal to 1, the\n // value of num_reorder_frames shall be inferred to be equal to 0.\"\n numReorderFrames = 0;\n maxDecFrameBuffering = 0;\n }\n else {\n const picWidthInMbs = picWidthInMbsMinus1 + 1;\n const 
picHeightInMapUnits = picHeightInMapUnitsMinus1 + 1;\n const frameHeightInMbs = (2 - frameMbsOnlyFlag) * picHeightInMapUnits;\n const levelInfo = AVC_LEVEL_TABLE.find(x => x.level >= levelIdc) ?? last(AVC_LEVEL_TABLE);\n // \"MaxDpbFrames is equal to\n // Min( MaxDpbMbs / ( picWidthInMbs * frameHeightInMbs ), 16 ) and MaxDpbMbs is given in Table A-1.\"\n const maxDpbFrames = Math.min(Math.floor(levelInfo.maxDpbMbs / (picWidthInMbs * frameHeightInMbs)), 16);\n // \"Otherwise, [...] the value of num_reorder_frames shall be inferred to be equal to MaxDpbFrames.\"\n numReorderFrames = maxDpbFrames;\n maxDecFrameBuffering = maxDpbFrames;\n }\n }\n assert(maxDecFrameBuffering !== null);\n return {\n profileIdc,\n constraintFlags,\n levelIdc,\n frameMbsOnlyFlag,\n chromaFormatIdc,\n bitDepthLumaMinus8,\n bitDepthChromaMinus8,\n codedWidth,\n codedHeight,\n displayWidth,\n displayHeight,\n pixelAspectRatio,\n colourPrimaries,\n matrixCoefficients,\n transferCharacteristics,\n fullRangeFlag,\n numReorderFrames,\n maxDecFrameBuffering,\n };\n }\n catch (error) {\n console.error('Error parsing AVC SPS:', error);\n return null;\n }\n};\nconst skipAvcHrdParameters = (bitstream) => {\n const cpb_cnt_minus1 = readExpGolomb(bitstream);\n bitstream.skipBits(4); // bit_rate_scale\n bitstream.skipBits(4); // cpb_size_scale\n for (let i = 0; i <= cpb_cnt_minus1; i++) {\n readExpGolomb(bitstream); // bit_rate_value_minus1[i]\n readExpGolomb(bitstream); // cpb_size_value_minus1[i]\n bitstream.skipBits(1); // cbr_flag[i]\n }\n bitstream.skipBits(5); // initial_cpb_removal_delay_length_minus1\n bitstream.skipBits(5); // cpb_removal_delay_length_minus1\n bitstream.skipBits(5); // dpb_output_delay_length_minus1\n bitstream.skipBits(5); // time_offset_length\n};\nexport const iterateHevcNalUnits = (packetData, decoderConfig) => {\n if (decoderConfig.description) {\n const bytes = toUint8Array(decoderConfig.description);\n const lengthSizeMinusOne = bytes[21] & 0b11;\n const lengthSize = 
(lengthSizeMinusOne + 1);\n return iterateNalUnitsInLengthPrefixed(packetData, lengthSize);\n }\n else {\n return iterateNalUnitsInAnnexB(packetData);\n }\n};\nexport const extractNalUnitTypeForHevc = (byte) => {\n return (byte >> 1) & 0x3F;\n};\n/** Parses an HEVC SPS (Sequence Parameter Set) to extract video information. */\nexport const parseHevcSps = (sps) => {\n try {\n const bitstream = new Bitstream(removeEmulationPreventionBytes(sps));\n bitstream.skipBits(16); // NAL header\n bitstream.readBits(4); // sps_video_parameter_set_id\n const spsMaxSubLayersMinus1 = bitstream.readBits(3);\n const spsTemporalIdNestingFlag = bitstream.readBits(1);\n const { general_profile_space, general_tier_flag, general_profile_idc, general_profile_compatibility_flags, general_constraint_indicator_flags, general_level_idc, } = parseProfileTierLevel(bitstream, spsMaxSubLayersMinus1);\n readExpGolomb(bitstream); // sps_seq_parameter_set_id\n const chromaFormatIdc = readExpGolomb(bitstream);\n let separateColourPlaneFlag = 0;\n if (chromaFormatIdc === 3) {\n separateColourPlaneFlag = bitstream.readBits(1);\n }\n const picWidthInLumaSamples = readExpGolomb(bitstream);\n const picHeightInLumaSamples = readExpGolomb(bitstream);\n let displayWidth = picWidthInLumaSamples;\n let displayHeight = picHeightInLumaSamples;\n if (bitstream.readBits(1)) { // conformance_window_flag\n const confWinLeftOffset = readExpGolomb(bitstream);\n const confWinRightOffset = readExpGolomb(bitstream);\n const confWinTopOffset = readExpGolomb(bitstream);\n const confWinBottomOffset = readExpGolomb(bitstream);\n // SubWidthC and SubHeightC depend on chroma_format_idc and separate_colour_plane_flag\n let subWidthC = 1;\n let subHeightC = 1;\n const chromaArrayType = separateColourPlaneFlag === 0 ? 
chromaFormatIdc : 0;\n if (chromaArrayType === 1) {\n subWidthC = 2;\n subHeightC = 2;\n }\n else if (chromaArrayType === 2) {\n subWidthC = 2;\n subHeightC = 1;\n }\n displayWidth -= (confWinLeftOffset + confWinRightOffset) * subWidthC;\n displayHeight -= (confWinTopOffset + confWinBottomOffset) * subHeightC;\n }\n const bitDepthLumaMinus8 = readExpGolomb(bitstream);\n const bitDepthChromaMinus8 = readExpGolomb(bitstream);\n readExpGolomb(bitstream); // log2_max_pic_order_cnt_lsb_minus4\n const spsSubLayerOrderingInfoPresentFlag = bitstream.readBits(1);\n const startI = spsSubLayerOrderingInfoPresentFlag ? 0 : spsMaxSubLayersMinus1;\n let spsMaxNumReorderPics = 0;\n for (let i = startI; i <= spsMaxSubLayersMinus1; i++) {\n readExpGolomb(bitstream); // sps_max_dec_pic_buffering_minus1[i]\n spsMaxNumReorderPics = readExpGolomb(bitstream); // sps_max_num_reorder_pics[i]\n readExpGolomb(bitstream); // sps_max_latency_increase_plus1[i]\n }\n readExpGolomb(bitstream); // log2_min_luma_coding_block_size_minus3\n readExpGolomb(bitstream); // log2_diff_max_min_luma_coding_block_size\n readExpGolomb(bitstream); // log2_min_luma_transform_block_size_minus2\n readExpGolomb(bitstream); // log2_diff_max_min_luma_transform_block_size\n readExpGolomb(bitstream); // max_transform_hierarchy_depth_inter\n readExpGolomb(bitstream); // max_transform_hierarchy_depth_intra\n if (bitstream.readBits(1)) { // scaling_list_enabled_flag\n if (bitstream.readBits(1)) {\n skipScalingListData(bitstream);\n }\n }\n bitstream.skipBits(1); // amp_enabled_flag\n bitstream.skipBits(1); // sample_adaptive_offset_enabled_flag\n if (bitstream.readBits(1)) { // pcm_enabled_flag\n bitstream.skipBits(4); // pcm_sample_bit_depth_luma_minus1\n bitstream.skipBits(4); // pcm_sample_bit_depth_chroma_minus1\n readExpGolomb(bitstream); // log2_min_pcm_luma_coding_block_size_minus3\n readExpGolomb(bitstream); // log2_diff_max_min_pcm_luma_coding_block_size\n bitstream.skipBits(1); // 
pcm_loop_filter_disabled_flag\n }\n const numShortTermRefPicSets = readExpGolomb(bitstream);\n skipAllStRefPicSets(bitstream, numShortTermRefPicSets);\n if (bitstream.readBits(1)) { // long_term_ref_pics_present_flag\n const numLongTermRefPicsSps = readExpGolomb(bitstream);\n for (let i = 0; i < numLongTermRefPicsSps; i++) {\n readExpGolomb(bitstream); // lt_ref_pic_poc_lsb_sps[i]\n bitstream.skipBits(1); // used_by_curr_pic_lt_sps_flag[i]\n }\n }\n bitstream.skipBits(1); // sps_temporal_mvp_enabled_flag\n bitstream.skipBits(1); // strong_intra_smoothing_enabled_flag\n let colourPrimaries = 2;\n let transferCharacteristics = 2;\n let matrixCoefficients = 2;\n let fullRangeFlag = 0;\n let minSpatialSegmentationIdc = 0;\n let pixelAspectRatio = { num: 1, den: 1 };\n if (bitstream.readBits(1)) { // vui_parameters_present_flag\n const vui = parseHevcVui(bitstream, spsMaxSubLayersMinus1);\n pixelAspectRatio = vui.pixelAspectRatio;\n colourPrimaries = vui.colourPrimaries;\n transferCharacteristics = vui.transferCharacteristics;\n matrixCoefficients = vui.matrixCoefficients;\n fullRangeFlag = vui.fullRangeFlag;\n minSpatialSegmentationIdc = vui.minSpatialSegmentationIdc;\n }\n return {\n displayWidth,\n displayHeight,\n pixelAspectRatio,\n colourPrimaries,\n transferCharacteristics,\n matrixCoefficients,\n fullRangeFlag,\n maxDecFrameBuffering: spsMaxNumReorderPics + 1,\n spsMaxSubLayersMinus1,\n spsTemporalIdNestingFlag,\n generalProfileSpace: general_profile_space,\n generalTierFlag: general_tier_flag,\n generalProfileIdc: general_profile_idc,\n generalProfileCompatibilityFlags: general_profile_compatibility_flags,\n generalConstraintIndicatorFlags: general_constraint_indicator_flags,\n generalLevelIdc: general_level_idc,\n chromaFormatIdc,\n bitDepthLumaMinus8,\n bitDepthChromaMinus8,\n minSpatialSegmentationIdc,\n };\n }\n catch (error) {\n console.error('Error parsing HEVC SPS:', error);\n return null;\n }\n};\n/** Builds a HevcDecoderConfigurationRecord from an HEVC 
packet in Annex B format. */\nexport const extractHevcDecoderConfigurationRecord = (packetData) => {\n try {\n const vpsUnits = [];\n const spsUnits = [];\n const ppsUnits = [];\n const seiUnits = [];\n for (const loc of iterateNalUnitsInAnnexB(packetData)) {\n const nalUnit = packetData.subarray(loc.offset, loc.offset + loc.length);\n const type = extractNalUnitTypeForHevc(nalUnit[0]);\n if (type === HevcNalUnitType.VPS_NUT) {\n vpsUnits.push(nalUnit);\n }\n else if (type === HevcNalUnitType.SPS_NUT) {\n spsUnits.push(nalUnit);\n }\n else if (type === HevcNalUnitType.PPS_NUT) {\n ppsUnits.push(nalUnit);\n }\n else if (type === HevcNalUnitType.PREFIX_SEI_NUT || type === HevcNalUnitType.SUFFIX_SEI_NUT) {\n seiUnits.push(nalUnit);\n }\n }\n if (spsUnits.length === 0 || ppsUnits.length === 0)\n return null;\n const spsInfo = parseHevcSps(spsUnits[0]);\n if (!spsInfo)\n return null;\n // Parse PPS for parallelismType\n let parallelismType = 0;\n if (ppsUnits.length > 0) {\n const pps = ppsUnits[0];\n const ppsBitstream = new Bitstream(removeEmulationPreventionBytes(pps));\n ppsBitstream.skipBits(16); // NAL header\n readExpGolomb(ppsBitstream); // pps_pic_parameter_set_id\n readExpGolomb(ppsBitstream); // pps_seq_parameter_set_id\n ppsBitstream.skipBits(1); // dependent_slice_segments_enabled_flag\n ppsBitstream.skipBits(1); // output_flag_present_flag\n ppsBitstream.skipBits(3); // num_extra_slice_header_bits\n ppsBitstream.skipBits(1); // sign_data_hiding_enabled_flag\n ppsBitstream.skipBits(1); // cabac_init_present_flag\n readExpGolomb(ppsBitstream); // num_ref_idx_l0_default_active_minus1\n readExpGolomb(ppsBitstream); // num_ref_idx_l1_default_active_minus1\n readSignedExpGolomb(ppsBitstream); // init_qp_minus26\n ppsBitstream.skipBits(1); // constrained_intra_pred_flag\n ppsBitstream.skipBits(1); // transform_skip_enabled_flag\n if (ppsBitstream.readBits(1)) { // cu_qp_delta_enabled_flag\n readExpGolomb(ppsBitstream); // diff_cu_qp_delta_depth\n }\n 
readSignedExpGolomb(ppsBitstream); // pps_cb_qp_offset\n readSignedExpGolomb(ppsBitstream); // pps_cr_qp_offset\n ppsBitstream.skipBits(1); // pps_slice_chroma_qp_offsets_present_flag\n ppsBitstream.skipBits(1); // weighted_pred_flag\n ppsBitstream.skipBits(1); // weighted_bipred_flag\n ppsBitstream.skipBits(1); // transquant_bypass_enabled_flag\n const tiles_enabled_flag = ppsBitstream.readBits(1);\n const entropy_coding_sync_enabled_flag = ppsBitstream.readBits(1);\n if (!tiles_enabled_flag && !entropy_coding_sync_enabled_flag)\n parallelismType = 0;\n else if (tiles_enabled_flag && !entropy_coding_sync_enabled_flag)\n parallelismType = 2;\n else if (!tiles_enabled_flag && entropy_coding_sync_enabled_flag)\n parallelismType = 3;\n else\n parallelismType = 0;\n }\n const arrays = [\n ...(vpsUnits.length\n ? [\n {\n arrayCompleteness: 1,\n nalUnitType: HevcNalUnitType.VPS_NUT,\n nalUnits: vpsUnits,\n },\n ]\n : []),\n ...(spsUnits.length\n ? [\n {\n arrayCompleteness: 1,\n nalUnitType: HevcNalUnitType.SPS_NUT,\n nalUnits: spsUnits,\n },\n ]\n : []),\n ...(ppsUnits.length\n ? [\n {\n arrayCompleteness: 1,\n nalUnitType: HevcNalUnitType.PPS_NUT,\n nalUnits: ppsUnits,\n },\n ]\n : []),\n ...(seiUnits.length\n ? 
[\n {\n arrayCompleteness: 1,\n nalUnitType: extractNalUnitTypeForHevc(seiUnits[0][0]),\n nalUnits: seiUnits,\n },\n ]\n : []),\n ];\n const record = {\n configurationVersion: 1,\n generalProfileSpace: spsInfo.generalProfileSpace,\n generalTierFlag: spsInfo.generalTierFlag,\n generalProfileIdc: spsInfo.generalProfileIdc,\n generalProfileCompatibilityFlags: spsInfo.generalProfileCompatibilityFlags,\n generalConstraintIndicatorFlags: spsInfo.generalConstraintIndicatorFlags,\n generalLevelIdc: spsInfo.generalLevelIdc,\n minSpatialSegmentationIdc: spsInfo.minSpatialSegmentationIdc,\n parallelismType,\n chromaFormatIdc: spsInfo.chromaFormatIdc,\n bitDepthLumaMinus8: spsInfo.bitDepthLumaMinus8,\n bitDepthChromaMinus8: spsInfo.bitDepthChromaMinus8,\n avgFrameRate: 0,\n constantFrameRate: 0,\n numTemporalLayers: spsInfo.spsMaxSubLayersMinus1 + 1,\n temporalIdNested: spsInfo.spsTemporalIdNestingFlag,\n lengthSizeMinusOne: 3,\n arrays,\n };\n return record;\n }\n catch (error) {\n console.error('Error building HEVC Decoder Configuration Record:', error);\n return null;\n }\n};\nconst parseProfileTierLevel = (bitstream, maxNumSubLayersMinus1) => {\n const general_profile_space = bitstream.readBits(2);\n const general_tier_flag = bitstream.readBits(1);\n const general_profile_idc = bitstream.readBits(5);\n let general_profile_compatibility_flags = 0;\n for (let i = 0; i < 32; i++) {\n general_profile_compatibility_flags = (general_profile_compatibility_flags << 1) | bitstream.readBits(1);\n }\n const general_constraint_indicator_flags = new Uint8Array(6);\n for (let i = 0; i < 6; i++) {\n general_constraint_indicator_flags[i] = bitstream.readBits(8);\n }\n const general_level_idc = bitstream.readBits(8);\n const sub_layer_profile_present_flag = [];\n const sub_layer_level_present_flag = [];\n for (let i = 0; i < maxNumSubLayersMinus1; i++) {\n sub_layer_profile_present_flag.push(bitstream.readBits(1));\n sub_layer_level_present_flag.push(bitstream.readBits(1));\n }\n if 
(maxNumSubLayersMinus1 > 0) {\n for (let i = maxNumSubLayersMinus1; i < 8; i++) {\n bitstream.skipBits(2); // reserved_zero_2bits\n }\n }\n for (let i = 0; i < maxNumSubLayersMinus1; i++) {\n if (sub_layer_profile_present_flag[i])\n bitstream.skipBits(88);\n if (sub_layer_level_present_flag[i])\n bitstream.skipBits(8);\n }\n return {\n general_profile_space,\n general_tier_flag,\n general_profile_idc,\n general_profile_compatibility_flags,\n general_constraint_indicator_flags,\n general_level_idc,\n };\n};\nconst skipScalingListData = (bitstream) => {\n for (let sizeId = 0; sizeId < 4; sizeId++) {\n for (let matrixId = 0; matrixId < (sizeId === 3 ? 2 : 6); matrixId++) {\n const scaling_list_pred_mode_flag = bitstream.readBits(1);\n if (!scaling_list_pred_mode_flag) {\n readExpGolomb(bitstream); // scaling_list_pred_matrix_id_delta\n }\n else {\n const coefNum = Math.min(64, 1 << (4 + (sizeId << 1)));\n if (sizeId > 1) {\n readSignedExpGolomb(bitstream); // scaling_list_dc_coef_minus8\n }\n for (let i = 0; i < coefNum; i++) {\n readSignedExpGolomb(bitstream); // scaling_list_delta_coef\n }\n }\n }\n }\n};\nconst skipAllStRefPicSets = (bitstream, num_short_term_ref_pic_sets) => {\n const NumDeltaPocs = [];\n for (let stRpsIdx = 0; stRpsIdx < num_short_term_ref_pic_sets; stRpsIdx++) {\n NumDeltaPocs[stRpsIdx] = skipStRefPicSet(bitstream, stRpsIdx, num_short_term_ref_pic_sets, NumDeltaPocs);\n }\n};\nconst skipStRefPicSet = (bitstream, stRpsIdx, num_short_term_ref_pic_sets, NumDeltaPocs) => {\n let NumDeltaPocsThis = 0;\n let inter_ref_pic_set_prediction_flag = 0;\n let RefRpsIdx = 0;\n if (stRpsIdx !== 0) {\n inter_ref_pic_set_prediction_flag = bitstream.readBits(1);\n }\n if (inter_ref_pic_set_prediction_flag) {\n if (stRpsIdx === num_short_term_ref_pic_sets) {\n const delta_idx_minus1 = readExpGolomb(bitstream);\n RefRpsIdx = stRpsIdx - (delta_idx_minus1 + 1);\n }\n else {\n RefRpsIdx = stRpsIdx - 1;\n }\n bitstream.readBits(1); // delta_rps_sign\n 
readExpGolomb(bitstream); // abs_delta_rps_minus1\n // The number of iterations is NumDeltaPocs[RefRpsIdx] + 1\n const numDelta = NumDeltaPocs[RefRpsIdx] ?? 0;\n for (let j = 0; j <= numDelta; j++) {\n const used_by_curr_pic_flag = bitstream.readBits(1);\n if (!used_by_curr_pic_flag) {\n bitstream.readBits(1); // use_delta_flag\n }\n }\n NumDeltaPocsThis = NumDeltaPocs[RefRpsIdx];\n }\n else {\n const num_negative_pics = readExpGolomb(bitstream);\n const num_positive_pics = readExpGolomb(bitstream);\n for (let i = 0; i < num_negative_pics; i++) {\n readExpGolomb(bitstream); // delta_poc_s0_minus1[i]\n bitstream.readBits(1); // used_by_curr_pic_s0_flag[i]\n }\n for (let i = 0; i < num_positive_pics; i++) {\n readExpGolomb(bitstream); // delta_poc_s1_minus1[i]\n bitstream.readBits(1); // used_by_curr_pic_s1_flag[i]\n }\n NumDeltaPocsThis = num_negative_pics + num_positive_pics;\n }\n return NumDeltaPocsThis;\n};\nconst parseHevcVui = (bitstream, sps_max_sub_layers_minus1) => {\n // Defaults: 2 = unspecified\n let colourPrimaries = 2;\n let transferCharacteristics = 2;\n let matrixCoefficients = 2;\n let fullRangeFlag = 0;\n let minSpatialSegmentationIdc = 0;\n let pixelAspectRatio = { num: 1, den: 1 };\n if (bitstream.readBits(1)) { // aspect_ratio_info_present_flag\n const aspect_ratio_idc = bitstream.readBits(8);\n if (aspect_ratio_idc === 255) {\n pixelAspectRatio = {\n num: bitstream.readBits(16),\n den: bitstream.readBits(16),\n };\n }\n else {\n const aspectRatio = AVC_HEVC_ASPECT_RATIO_IDC_TABLE[aspect_ratio_idc];\n if (aspectRatio) {\n pixelAspectRatio = aspectRatio;\n }\n }\n }\n if (bitstream.readBits(1)) { // overscan_info_present_flag\n bitstream.readBits(1); // overscan_appropriate_flag\n }\n if (bitstream.readBits(1)) { // video_signal_type_present_flag\n bitstream.readBits(3); // video_format\n fullRangeFlag = bitstream.readBits(1);\n if (bitstream.readBits(1)) { // colour_description_present_flag\n colourPrimaries = bitstream.readBits(8);\n 
transferCharacteristics = bitstream.readBits(8);\n matrixCoefficients = bitstream.readBits(8);\n }\n }\n if (bitstream.readBits(1)) { // chroma_loc_info_present_flag\n readExpGolomb(bitstream); // chroma_sample_loc_type_top_field\n readExpGolomb(bitstream); // chroma_sample_loc_type_bottom_field\n }\n bitstream.readBits(1); // neutral_chroma_indication_flag\n bitstream.readBits(1); // field_seq_flag\n bitstream.readBits(1); // frame_field_info_present_flag\n if (bitstream.readBits(1)) { // default_display_window_flag\n readExpGolomb(bitstream); // def_disp_win_left_offset\n readExpGolomb(bitstream); // def_disp_win_right_offset\n readExpGolomb(bitstream); // def_disp_win_top_offset\n readExpGolomb(bitstream); // def_disp_win_bottom_offset\n }\n if (bitstream.readBits(1)) { // vui_timing_info_present_flag\n bitstream.readBits(32); // vui_num_units_in_tick\n bitstream.readBits(32); // vui_time_scale\n if (bitstream.readBits(1)) { // vui_poc_proportional_to_timing_flag\n readExpGolomb(bitstream); // vui_num_ticks_poc_diff_one_minus1\n }\n if (bitstream.readBits(1)) {\n skipHevcHrdParameters(bitstream, true, sps_max_sub_layers_minus1);\n }\n }\n if (bitstream.readBits(1)) { // bitstream_restriction_flag\n bitstream.readBits(1); // tiles_fixed_structure_flag\n bitstream.readBits(1); // motion_vectors_over_pic_boundaries_flag\n bitstream.readBits(1); // restricted_ref_pic_lists_flag\n minSpatialSegmentationIdc = readExpGolomb(bitstream);\n readExpGolomb(bitstream); // max_bytes_per_pic_denom\n readExpGolomb(bitstream); // max_bits_per_min_cu_denom\n readExpGolomb(bitstream); // log2_max_mv_length_horizontal\n readExpGolomb(bitstream); // log2_max_mv_length_vertical\n }\n return {\n pixelAspectRatio,\n colourPrimaries,\n transferCharacteristics,\n matrixCoefficients,\n fullRangeFlag,\n minSpatialSegmentationIdc,\n };\n};\nconst skipHevcHrdParameters = (bitstream, commonInfPresentFlag, maxNumSubLayersMinus1) => {\n let nal_hrd_parameters_present_flag = false;\n let 
vcl_hrd_parameters_present_flag = false;\n let sub_pic_hrd_params_present_flag = false;\n if (commonInfPresentFlag) {\n nal_hrd_parameters_present_flag = bitstream.readBits(1) === 1;\n vcl_hrd_parameters_present_flag = bitstream.readBits(1) === 1;\n if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {\n sub_pic_hrd_params_present_flag = bitstream.readBits(1) === 1;\n if (sub_pic_hrd_params_present_flag) {\n bitstream.readBits(8); // tick_divisor_minus2\n bitstream.readBits(5); // du_cpb_removal_delay_increment_length_minus1\n bitstream.readBits(1); // sub_pic_cpb_params_in_pic_timing_sei_flag\n bitstream.readBits(5); // dpb_output_delay_du_length_minus1\n }\n bitstream.readBits(4); // bit_rate_scale\n bitstream.readBits(4); // cpb_size_scale\n if (sub_pic_hrd_params_present_flag) {\n bitstream.readBits(4); // cpb_size_du_scale\n }\n bitstream.readBits(5); // initial_cpb_removal_delay_length_minus1\n bitstream.readBits(5); // au_cpb_removal_delay_length_minus1\n bitstream.readBits(5); // dpb_output_delay_length_minus1\n }\n }\n for (let i = 0; i <= maxNumSubLayersMinus1; i++) {\n const fixed_pic_rate_general_flag = bitstream.readBits(1) === 1;\n let fixed_pic_rate_within_cvs_flag = true; // Default assumption if general is true\n if (!fixed_pic_rate_general_flag) {\n fixed_pic_rate_within_cvs_flag = bitstream.readBits(1) === 1;\n }\n let low_delay_hrd_flag = false; // Default assumption\n if (fixed_pic_rate_within_cvs_flag) {\n readExpGolomb(bitstream); // elemental_duration_in_tc_minus1[i]\n }\n else {\n low_delay_hrd_flag = bitstream.readBits(1) === 1;\n }\n let CpbCnt = 1; // Default if low_delay is true\n if (!low_delay_hrd_flag) {\n const cpb_cnt_minus1 = readExpGolomb(bitstream); // cpb_cnt_minus1[i]\n CpbCnt = cpb_cnt_minus1 + 1;\n }\n if (nal_hrd_parameters_present_flag) {\n skipSubLayerHrdParameters(bitstream, CpbCnt, sub_pic_hrd_params_present_flag);\n }\n if (vcl_hrd_parameters_present_flag) {\n skipSubLayerHrdParameters(bitstream, 
CpbCnt, sub_pic_hrd_params_present_flag);\n }\n }\n};\nconst skipSubLayerHrdParameters = (bitstream, CpbCnt, sub_pic_hrd_params_present_flag) => {\n for (let i = 0; i < CpbCnt; i++) {\n readExpGolomb(bitstream); // bit_rate_value_minus1[i]\n readExpGolomb(bitstream); // cpb_size_value_minus1[i]\n if (sub_pic_hrd_params_present_flag) {\n readExpGolomb(bitstream); // cpb_size_du_value_minus1[i]\n readExpGolomb(bitstream); // bit_rate_du_value_minus1[i]\n }\n bitstream.readBits(1); // cbr_flag[i]\n }\n};\n/** Serializes an HevcDecoderConfigurationRecord into the format specified in Section 8.3.3.1 of ISO 14496-15. */\nexport const serializeHevcDecoderConfigurationRecord = (record) => {\n const bytes = [];\n bytes.push(record.configurationVersion);\n bytes.push(((record.generalProfileSpace & 0x3) << 6)\n | ((record.generalTierFlag & 0x1) << 5)\n | (record.generalProfileIdc & 0x1F));\n bytes.push((record.generalProfileCompatibilityFlags >>> 24) & 0xFF);\n bytes.push((record.generalProfileCompatibilityFlags >>> 16) & 0xFF);\n bytes.push((record.generalProfileCompatibilityFlags >>> 8) & 0xFF);\n bytes.push(record.generalProfileCompatibilityFlags & 0xFF);\n bytes.push(...record.generalConstraintIndicatorFlags);\n bytes.push(record.generalLevelIdc & 0xFF);\n bytes.push(0xF0 | ((record.minSpatialSegmentationIdc >> 8) & 0x0F)); // Reserved + high nibble\n bytes.push(record.minSpatialSegmentationIdc & 0xFF); // Low byte\n bytes.push(0xFC | (record.parallelismType & 0x03));\n bytes.push(0xFC | (record.chromaFormatIdc & 0x03));\n bytes.push(0xF8 | (record.bitDepthLumaMinus8 & 0x07));\n bytes.push(0xF8 | (record.bitDepthChromaMinus8 & 0x07));\n bytes.push((record.avgFrameRate >> 8) & 0xFF); // High byte\n bytes.push(record.avgFrameRate & 0xFF); // Low byte\n bytes.push(((record.constantFrameRate & 0x03) << 6)\n | ((record.numTemporalLayers & 0x07) << 3)\n | ((record.temporalIdNested & 0x01) << 2)\n | (record.lengthSizeMinusOne & 0x03));\n bytes.push(record.arrays.length & 
0xFF);\n for (const arr of record.arrays) {\n bytes.push(((arr.arrayCompleteness & 0x01) << 7)\n | (0 << 6)\n | (arr.nalUnitType & 0x3F));\n bytes.push((arr.nalUnits.length >> 8) & 0xFF); // High byte\n bytes.push(arr.nalUnits.length & 0xFF); // Low byte\n for (const nal of arr.nalUnits) {\n bytes.push((nal.length >> 8) & 0xFF); // High byte\n bytes.push(nal.length & 0xFF); // Low byte\n for (let i = 0; i < nal.length; i++) {\n bytes.push(nal[i]);\n }\n }\n }\n return new Uint8Array(bytes);\n};\n/** Deserializes an HevcDecoderConfigurationRecord from the format specified in Section 8.3.3.1 of ISO 14496-15. */\nexport const deserializeHevcDecoderConfigurationRecord = (data) => {\n try {\n const view = toDataView(data);\n let offset = 0;\n const configurationVersion = view.getUint8(offset++);\n const byte1 = view.getUint8(offset++);\n const generalProfileSpace = (byte1 >> 6) & 0x3;\n const generalTierFlag = (byte1 >> 5) & 0x1;\n const generalProfileIdc = byte1 & 0x1F;\n const generalProfileCompatibilityFlags = view.getUint32(offset, false);\n offset += 4;\n const generalConstraintIndicatorFlags = data.subarray(offset, offset + 6);\n offset += 6;\n const generalLevelIdc = view.getUint8(offset++);\n const minSpatialSegmentationIdc = ((view.getUint8(offset++) & 0x0F) << 8) | view.getUint8(offset++);\n const parallelismType = view.getUint8(offset++) & 0x03;\n const chromaFormatIdc = view.getUint8(offset++) & 0x03;\n const bitDepthLumaMinus8 = view.getUint8(offset++) & 0x07;\n const bitDepthChromaMinus8 = view.getUint8(offset++) & 0x07;\n const avgFrameRate = view.getUint16(offset, false);\n offset += 2;\n const byte21 = view.getUint8(offset++);\n const constantFrameRate = (byte21 >> 6) & 0x03;\n const numTemporalLayers = (byte21 >> 3) & 0x07;\n const temporalIdNested = (byte21 >> 2) & 0x01;\n const lengthSizeMinusOne = byte21 & 0x03;\n const numOfArrays = view.getUint8(offset++);\n const arrays = [];\n for (let i = 0; i < numOfArrays; i++) {\n const arrByte = 
view.getUint8(offset++);\n const arrayCompleteness = (arrByte >> 7) & 0x01;\n const nalUnitType = arrByte & 0x3F;\n const numNalus = view.getUint16(offset, false);\n offset += 2;\n const nalUnits = [];\n for (let j = 0; j < numNalus; j++) {\n const nalUnitLength = view.getUint16(offset, false);\n offset += 2;\n nalUnits.push(data.subarray(offset, offset + nalUnitLength));\n offset += nalUnitLength;\n }\n arrays.push({\n arrayCompleteness,\n nalUnitType,\n nalUnits,\n });\n }\n return {\n configurationVersion,\n generalProfileSpace,\n generalTierFlag,\n generalProfileIdc,\n generalProfileCompatibilityFlags,\n generalConstraintIndicatorFlags,\n generalLevelIdc,\n minSpatialSegmentationIdc,\n parallelismType,\n chromaFormatIdc,\n bitDepthLumaMinus8,\n bitDepthChromaMinus8,\n avgFrameRate,\n constantFrameRate,\n numTemporalLayers,\n temporalIdNested,\n lengthSizeMinusOne,\n arrays,\n };\n }\n catch (error) {\n console.error('Error deserializing HEVC Decoder Configuration Record:', error);\n return null;\n }\n};\nexport const extractVp9CodecInfoFromPacket = (packet) => {\n // eslint-disable-next-line @stylistic/max-len\n // https://storage.googleapis.com/downloads.webmproject.org/docs/vp9/vp9-bitstream-specification-v0.7-20170222-draft.pdf\n // http://downloads.webmproject.org/docs/vp9/vp9-bitstream_superframe-and-uncompressed-header_v1.0.pdf\n const bitstream = new Bitstream(packet);\n // Frame marker (0b10)\n const frameMarker = bitstream.readBits(2);\n if (frameMarker !== 2) {\n return null;\n }\n // Profile\n const profileLowBit = bitstream.readBits(1);\n const profileHighBit = bitstream.readBits(1);\n const profile = (profileHighBit << 1) + profileLowBit;\n // Skip reserved bit for profile 3\n if (profile === 3) {\n bitstream.skipBits(1);\n }\n // show_existing_frame\n const showExistingFrame = bitstream.readBits(1);\n if (showExistingFrame === 1) {\n return null;\n }\n // frame_type (0 = key frame)\n const frameType = bitstream.readBits(1);\n if (frameType !== 0) 
{\n return null;\n }\n // Skip show_frame and error_resilient_mode\n bitstream.skipBits(2);\n // Sync code (0x498342)\n const syncCode = bitstream.readBits(24);\n if (syncCode !== 0x498342) {\n return null;\n }\n // Color config\n let bitDepth = 8;\n if (profile >= 2) {\n const tenOrTwelveBit = bitstream.readBits(1);\n bitDepth = tenOrTwelveBit ? 12 : 10;\n }\n // Color space\n const colorSpace = bitstream.readBits(3);\n let chromaSubsampling = 0;\n let videoFullRangeFlag = 0;\n if (colorSpace !== 7) { // 7 is CS_RGB\n const colorRange = bitstream.readBits(1);\n videoFullRangeFlag = colorRange;\n if (profile === 1 || profile === 3) {\n const subsamplingX = bitstream.readBits(1);\n const subsamplingY = bitstream.readBits(1);\n // 0 = 4:2:0 vertical\n // 1 = 4:2:0 colocated\n // 2 = 4:2:2\n // 3 = 4:4:4\n chromaSubsampling = !subsamplingX && !subsamplingY\n ? 3 // 0,0 = 4:4:4\n : subsamplingX && !subsamplingY\n ? 2 // 1,0 = 4:2:2\n : 1; // 1,1 = 4:2:0 colocated (default)\n // Skip reserved bit\n bitstream.skipBits(1);\n }\n else {\n // For profile 0 and 2, always 4:2:0\n chromaSubsampling = 1; // Using colocated as default\n }\n }\n else {\n // RGB is always 4:4:4\n chromaSubsampling = 3;\n videoFullRangeFlag = 1;\n }\n // Parse frame size\n const widthMinusOne = bitstream.readBits(16);\n const heightMinusOne = bitstream.readBits(16);\n const width = widthMinusOne + 1;\n const height = heightMinusOne + 1;\n // Calculate level based on dimensions\n const pictureSize = width * height;\n let level = last(VP9_LEVEL_TABLE).level; // Default to highest level\n for (const entry of VP9_LEVEL_TABLE) {\n if (pictureSize <= entry.maxPictureSize) {\n level = entry.level;\n break;\n }\n }\n // Map color_space to standard values\n const matrixCoefficients = colorSpace === 7\n ? 0\n : colorSpace === 2\n ? 1\n : colorSpace === 1\n ? 6\n : 2;\n const colourPrimaries = colorSpace === 2\n ? 1\n : colorSpace === 1\n ? 6\n : 2;\n const transferCharacteristics = colorSpace === 2\n ? 
1\n : colorSpace === 1\n ? 6\n : 2;\n return {\n profile,\n level,\n bitDepth,\n chromaSubsampling,\n videoFullRangeFlag,\n colourPrimaries,\n transferCharacteristics,\n matrixCoefficients,\n };\n};\n/** Iterates over all OBUs in an AV1 packet bistream. */\nexport const iterateAv1PacketObus = function* (packet) {\n // https://aomediacodec.github.io/av1-spec/av1-spec.pdf\n const bitstream = new Bitstream(packet);\n const readLeb128 = () => {\n let value = 0;\n for (let i = 0; i < 8; i++) {\n const byte = bitstream.readAlignedByte();\n value |= ((byte & 0x7f) << (i * 7));\n if (!(byte & 0x80)) {\n break;\n }\n // Spec requirement\n if (i === 7 && (byte & 0x80)) {\n return null;\n }\n }\n // Spec requirement\n if (value >= 2 ** 32 - 1) {\n return null;\n }\n return value;\n };\n while (bitstream.getBitsLeft() >= 8) {\n // Parse OBU header\n bitstream.skipBits(1);\n const obuType = bitstream.readBits(4);\n const obuExtension = bitstream.readBits(1);\n const obuHasSizeField = bitstream.readBits(1);\n bitstream.skipBits(1);\n // Skip extension header if present\n if (obuExtension) {\n bitstream.skipBits(8);\n }\n // Read OBU size if present\n let obuSize;\n if (obuHasSizeField) {\n const obuSizeValue = readLeb128();\n if (obuSizeValue === null)\n return; // It was invalid\n obuSize = obuSizeValue;\n }\n else {\n // Calculate remaining bits and convert to bytes, rounding down\n obuSize = Math.floor(bitstream.getBitsLeft() / 8);\n }\n assert(bitstream.pos % 8 === 0);\n yield {\n type: obuType,\n data: packet.subarray(bitstream.pos / 8, bitstream.pos / 8 + obuSize),\n };\n // Move to next OBU\n bitstream.skipBits(obuSize * 8);\n }\n};\n/**\n * When AV1 codec information is not provided by the container, we can still try to extract the information by digging\n * into the AV1 bitstream.\n */\nexport const extractAv1CodecInfoFromPacket = (packet) => {\n // https://aomediacodec.github.io/av1-spec/av1-spec.pdf\n for (const { type, data } of iterateAv1PacketObus(packet)) {\n if 
(type !== 1) {\n continue; // 1 == OBU_SEQUENCE_HEADER\n }\n const bitstream = new Bitstream(data);\n // Read sequence header fields\n const seqProfile = bitstream.readBits(3);\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const stillPicture = bitstream.readBits(1);\n const reducedStillPictureHeader = bitstream.readBits(1);\n let seqLevel = 0;\n let seqTier = 0;\n let bufferDelayLengthMinus1 = 0;\n if (reducedStillPictureHeader) {\n seqLevel = bitstream.readBits(5);\n }\n else {\n // Parse timing_info_present_flag\n const timingInfoPresentFlag = bitstream.readBits(1);\n if (timingInfoPresentFlag) {\n // Skip timing info (num_units_in_display_tick, time_scale, equal_picture_interval)\n bitstream.skipBits(32); // num_units_in_display_tick\n bitstream.skipBits(32); // time_scale\n const equalPictureInterval = bitstream.readBits(1);\n if (equalPictureInterval) {\n // Skip num_ticks_per_picture_minus_1 (uvlc)\n // Since this is variable length, we'd need to implement uvlc reading\n // For now, we'll return null as this is rare\n return null;\n }\n }\n // Parse decoder_model_info_present_flag\n const decoderModelInfoPresentFlag = bitstream.readBits(1);\n if (decoderModelInfoPresentFlag) {\n // Store buffer_delay_length_minus_1 instead of just skipping\n bufferDelayLengthMinus1 = bitstream.readBits(5);\n bitstream.skipBits(32); // num_units_in_decoding_tick\n bitstream.skipBits(5); // buffer_removal_time_length_minus_1\n bitstream.skipBits(5); // frame_presentation_time_length_minus_1\n }\n // Parse operating_points_cnt_minus_1\n const operatingPointsCntMinus1 = bitstream.readBits(5);\n // For each operating point\n for (let i = 0; i <= operatingPointsCntMinus1; i++) {\n // operating_point_idc[i]\n bitstream.skipBits(12);\n // seq_level_idx[i]\n const seqLevelIdx = bitstream.readBits(5);\n if (i === 0) {\n seqLevel = seqLevelIdx;\n }\n if (seqLevelIdx > 7) {\n // seq_tier[i]\n const seqTierTemp = bitstream.readBits(1);\n if (i === 0) {\n seqTier = 
seqTierTemp;\n }\n }\n if (decoderModelInfoPresentFlag) {\n // decoder_model_present_for_this_op[i]\n const decoderModelPresentForThisOp = bitstream.readBits(1);\n if (decoderModelPresentForThisOp) {\n const n = bufferDelayLengthMinus1 + 1;\n bitstream.skipBits(n); // decoder_buffer_delay[op]\n bitstream.skipBits(n); // encoder_buffer_delay[op]\n bitstream.skipBits(1); // low_delay_mode_flag[op]\n }\n }\n // initial_display_delay_present_flag\n const initialDisplayDelayPresentFlag = bitstream.readBits(1);\n if (initialDisplayDelayPresentFlag) {\n // initial_display_delay_minus_1[i]\n bitstream.skipBits(4);\n }\n }\n }\n // Frame size\n const frameWidthBitsMinus1 = bitstream.readBits(4);\n const frameHeightBitsMinus1 = bitstream.readBits(4);\n const n1 = frameWidthBitsMinus1 + 1;\n bitstream.skipBits(n1); // max_frame_width_minus_1\n const n2 = frameHeightBitsMinus1 + 1;\n bitstream.skipBits(n2); // max_frame_height_minus_1\n // Frame IDs\n let frameIdNumbersPresentFlag = 0;\n if (reducedStillPictureHeader) {\n frameIdNumbersPresentFlag = 0;\n }\n else {\n frameIdNumbersPresentFlag = bitstream.readBits(1);\n }\n if (frameIdNumbersPresentFlag) {\n bitstream.skipBits(4); // delta_frame_id_length_minus_2\n bitstream.skipBits(3); // additional_frame_id_length_minus_1\n }\n bitstream.skipBits(1); // use_128x128_superblock\n bitstream.skipBits(1); // enable_filter_intra\n bitstream.skipBits(1); // enable_intra_edge_filter\n if (!reducedStillPictureHeader) {\n bitstream.skipBits(1); // enable_interintra_compound\n bitstream.skipBits(1); // enable_masked_compound\n bitstream.skipBits(1); // enable_warped_motion\n bitstream.skipBits(1); // enable_dual_filter\n const enableOrderHint = bitstream.readBits(1);\n if (enableOrderHint) {\n bitstream.skipBits(1); // enable_jnt_comp\n bitstream.skipBits(1); // enable_ref_frame_mvs\n }\n const seqChooseScreenContentTools = bitstream.readBits(1);\n let seqForceScreenContentTools = 0;\n if (seqChooseScreenContentTools) {\n 
seqForceScreenContentTools = 2; // SELECT_SCREEN_CONTENT_TOOLS\n }\n else {\n seqForceScreenContentTools = bitstream.readBits(1);\n }\n if (seqForceScreenContentTools > 0) {\n const seqChooseIntegerMv = bitstream.readBits(1);\n if (!seqChooseIntegerMv) {\n bitstream.skipBits(1); // seq_force_integer_mv\n }\n }\n if (enableOrderHint) {\n bitstream.skipBits(3); // order_hint_bits_minus_1\n }\n }\n bitstream.skipBits(1); // enable_superres\n bitstream.skipBits(1); // enable_cdef\n bitstream.skipBits(1); // enable_restoration\n // color_config()\n const highBitdepth = bitstream.readBits(1);\n let bitDepth = 8;\n if (seqProfile === 2 && highBitdepth) {\n const twelveBit = bitstream.readBits(1);\n bitDepth = twelveBit ? 12 : 10;\n }\n else if (seqProfile <= 2) {\n bitDepth = highBitdepth ? 10 : 8;\n }\n let monochrome = 0;\n if (seqProfile !== 1) {\n monochrome = bitstream.readBits(1);\n }\n let chromaSubsamplingX = 1;\n let chromaSubsamplingY = 1;\n let chromaSamplePosition = 0;\n if (!monochrome) {\n if (seqProfile === 0) {\n chromaSubsamplingX = 1;\n chromaSubsamplingY = 1;\n }\n else if (seqProfile === 1) {\n chromaSubsamplingX = 0;\n chromaSubsamplingY = 0;\n }\n else {\n if (bitDepth === 12) {\n chromaSubsamplingX = bitstream.readBits(1);\n if (chromaSubsamplingX) {\n chromaSubsamplingY = bitstream.readBits(1);\n }\n }\n }\n if (chromaSubsamplingX && chromaSubsamplingY) {\n chromaSamplePosition = bitstream.readBits(2);\n }\n }\n return {\n profile: seqProfile,\n level: seqLevel,\n tier: seqTier,\n bitDepth,\n monochrome,\n chromaSubsamplingX,\n chromaSubsamplingY,\n chromaSamplePosition,\n };\n }\n return null;\n};\nexport const parseOpusIdentificationHeader = (bytes) => {\n const view = toDataView(bytes);\n const outputChannelCount = view.getUint8(9);\n const preSkip = view.getUint16(10, true);\n const inputSampleRate = view.getUint32(12, true);\n const outputGain = view.getInt16(16, true);\n const channelMappingFamily = view.getUint8(18);\n let 
channelMappingTable = null;\n if (channelMappingFamily) {\n channelMappingTable = bytes.subarray(19, 19 + 2 + outputChannelCount);\n }\n return {\n outputChannelCount,\n preSkip,\n inputSampleRate,\n outputGain,\n channelMappingFamily,\n channelMappingTable,\n };\n};\n// From https://datatracker.ietf.org/doc/html/rfc6716, in 48 kHz samples\nconst OPUS_FRAME_DURATION_TABLE = [\n 480, 960, 1920, 2880,\n 480, 960, 1920, 2880,\n 480, 960, 1920, 2880,\n 480, 960,\n 480, 960,\n 120, 240, 480, 960,\n 120, 240, 480, 960,\n 120, 240, 480, 960,\n 120, 240, 480, 960,\n];\nexport const parseOpusTocByte = (packet) => {\n const config = packet[0] >> 3;\n return {\n durationInSamples: OPUS_FRAME_DURATION_TABLE[config],\n };\n};\n// Based on vorbis_parser.c from FFmpeg.\nexport const parseModesFromVorbisSetupPacket = (setupHeader) => {\n // Verify that this is a Setup header.\n if (setupHeader.length < 7) {\n throw new Error('Setup header is too short.');\n }\n if (setupHeader[0] !== 5) {\n throw new Error('Wrong packet type in Setup header.');\n }\n const signature = String.fromCharCode(...setupHeader.slice(1, 7));\n if (signature !== 'vorbis') {\n throw new Error('Invalid packet signature in Setup header.');\n }\n // Reverse the entire buffer.\n const bufSize = setupHeader.length;\n const revBuffer = new Uint8Array(bufSize);\n for (let i = 0; i < bufSize; i++) {\n revBuffer[i] = setupHeader[bufSize - 1 - i];\n }\n // Initialize a Bitstream on the reversed buffer.\n const bitstream = new Bitstream(revBuffer);\n // --- Find the framing bit.\n // In FFmpeg code, we scan until get_bits1() returns 1.\n let gotFramingBit = 0;\n while (bitstream.getBitsLeft() > 97) {\n if (bitstream.readBits(1) === 1) {\n gotFramingBit = bitstream.pos;\n break;\n }\n }\n if (gotFramingBit === 0) {\n throw new Error('Invalid Setup header: framing bit not found.');\n }\n // --- Search backwards for a valid mode header.\n // We try to “guess” the number of modes by reading a fixed pattern.\n let modeCount 
= 0;\n let gotModeHeader = false;\n let lastModeCount = 0;\n while (bitstream.getBitsLeft() >= 97) {\n const tempPos = bitstream.pos;\n const a = bitstream.readBits(8);\n const b = bitstream.readBits(16);\n const c = bitstream.readBits(16);\n // If a > 63 or b or c nonzero, assume we’ve gone too far.\n if (a > 63 || b !== 0 || c !== 0) {\n bitstream.pos = tempPos;\n break;\n }\n bitstream.skipBits(1);\n modeCount++;\n if (modeCount > 64) {\n break;\n }\n const bsClone = bitstream.clone();\n const candidate = bsClone.readBits(6) + 1;\n if (candidate === modeCount) {\n gotModeHeader = true;\n lastModeCount = modeCount;\n }\n }\n if (!gotModeHeader) {\n throw new Error('Invalid Setup header: mode header not found.');\n }\n if (lastModeCount > 63) {\n throw new Error(`Unsupported mode count: ${lastModeCount}.`);\n }\n const finalModeCount = lastModeCount;\n // --- Reinitialize the bitstream.\n bitstream.pos = 0;\n // Skip the bits up to the found framing bit.\n bitstream.skipBits(gotFramingBit);\n // --- Now read, for each mode (in reverse order), 40 bits then one bit.\n // That one bit is the mode blockflag.\n const modeBlockflags = Array(finalModeCount).fill(0);\n for (let i = finalModeCount - 1; i >= 0; i--) {\n bitstream.skipBits(40);\n modeBlockflags[i] = bitstream.readBits(1);\n }\n return { modeBlockflags };\n};\n/** Determines a packet's type (key or delta) by digging into the packet bitstream. 
*/\nexport const determineVideoPacketType = (codec, decoderConfig, packetData) => {\n switch (codec) {\n case 'avc':\n {\n for (const loc of iterateAvcNalUnits(packetData, decoderConfig)) {\n const nalTypeByte = packetData[loc.offset];\n const type = extractNalUnitTypeForAvc(nalTypeByte);\n if (type >= AvcNalUnitType.NON_IDR_SLICE && type <= AvcNalUnitType.SLICE_DPC) {\n return 'delta';\n }\n if (type === AvcNalUnitType.IDR) {\n return 'key';\n }\n // In addition to IDR, Recovery Point SEI also counts as a valid H.264 keyframe by current consensus.\n // See https://github.com/w3c/webcodecs/issues/650 for the relevant discussion. WebKit and Firefox have\n // always supported them, but Chromium hasn't, therefore the (admittedly dirty) version check.\n if (type === AvcNalUnitType.SEI && (!isChromium() || getChromiumVersion() >= 144)) {\n const nalUnit = packetData.subarray(loc.offset, loc.offset + loc.length);\n const bytes = removeEmulationPreventionBytes(nalUnit);\n let pos = 1; // Skip NALU header\n // sei_rbsp()\n do {\n // sei_message()\n let payloadType = 0;\n while (true) {\n const nextByte = bytes[pos++];\n if (nextByte === undefined)\n break;\n payloadType += nextByte;\n if (nextByte < 255) {\n break;\n }\n }\n let payloadSize = 0;\n while (true) {\n const nextByte = bytes[pos++];\n if (nextByte === undefined)\n break;\n payloadSize += nextByte;\n if (nextByte < 255) {\n break;\n }\n }\n // sei_payload()\n const PAYLOAD_TYPE_RECOVERY_POINT = 6;\n if (payloadType === PAYLOAD_TYPE_RECOVERY_POINT) {\n const bitstream = new Bitstream(bytes);\n bitstream.pos = 8 * pos;\n const recoveryFrameCount = readExpGolomb(bitstream);\n const exactMatchFlag = bitstream.readBits(1);\n if (recoveryFrameCount === 0 && exactMatchFlag === 1) {\n // https://github.com/w3c/webcodecs/pull/910\n // \"recovery_frame_cnt == 0 and exact_match_flag=1 in the SEI recovery payload\"\n return 'key';\n }\n }\n pos += payloadSize;\n } while (pos < bytes.length - 1);\n }\n }\n return 'delta';\n 
}\n ;\n case 'hevc':\n {\n for (const loc of iterateHevcNalUnits(packetData, decoderConfig)) {\n const type = extractNalUnitTypeForHevc(packetData[loc.offset]);\n if (type < HevcNalUnitType.BLA_W_LP) {\n return 'delta';\n }\n if (type <= HevcNalUnitType.RSV_IRAP_VCL23) {\n return 'key';\n }\n }\n return 'delta';\n }\n ;\n case 'vp8':\n {\n // VP8, once again, by far the easiest to deal with.\n const frameType = packetData[0] & 0b1;\n return frameType === 0 ? 'key' : 'delta';\n }\n ;\n case 'vp9':\n {\n const bitstream = new Bitstream(packetData);\n if (bitstream.readBits(2) !== 2) {\n return null;\n }\n ;\n const profileLowBit = bitstream.readBits(1);\n const profileHighBit = bitstream.readBits(1);\n const profile = (profileHighBit << 1) + profileLowBit;\n // Skip reserved bit for profile 3\n if (profile === 3) {\n bitstream.skipBits(1);\n }\n const showExistingFrame = bitstream.readBits(1);\n if (showExistingFrame) {\n return null;\n }\n const frameType = bitstream.readBits(1);\n return frameType === 0 ? 'key' : 'delta';\n }\n ;\n case 'av1':\n {\n let reducedStillPictureHeader = false;\n for (const { type, data } of iterateAv1PacketObus(packetData)) {\n if (type === 1) { // OBU_SEQUENCE_HEADER\n const bitstream = new Bitstream(data);\n bitstream.skipBits(4);\n reducedStillPictureHeader = !!bitstream.readBits(1);\n }\n else if (type === 3 // OBU_FRAME_HEADER\n || type === 6 // OBU_FRAME\n || type === 7 // OBU_REDUNDANT_FRAME_HEADER\n ) {\n if (reducedStillPictureHeader) {\n return 'key';\n }\n const bitstream = new Bitstream(data);\n const showExistingFrame = bitstream.readBits(1);\n if (showExistingFrame) {\n return null;\n }\n const frameType = bitstream.readBits(2);\n return frameType === 0 ? 
'key' : 'delta';\n }\n }\n return null;\n }\n ;\n default:\n {\n assertNever(codec);\n assert(false);\n }\n ;\n }\n};\nexport var FlacBlockType;\n(function (FlacBlockType) {\n FlacBlockType[FlacBlockType[\"STREAMINFO\"] = 0] = \"STREAMINFO\";\n FlacBlockType[FlacBlockType[\"VORBIS_COMMENT\"] = 4] = \"VORBIS_COMMENT\";\n FlacBlockType[FlacBlockType[\"PICTURE\"] = 6] = \"PICTURE\";\n})(FlacBlockType || (FlacBlockType = {}));\nexport const readVorbisComments = (bytes, metadataTags) => {\n // https://datatracker.ietf.org/doc/html/rfc7845#section-5.2\n const commentView = toDataView(bytes);\n let commentPos = 0;\n const vendorStringLength = commentView.getUint32(commentPos, true);\n commentPos += 4;\n const vendorString = textDecoder.decode(bytes.subarray(commentPos, commentPos + vendorStringLength));\n commentPos += vendorStringLength;\n if (vendorStringLength > 0) {\n // Expose the vendor string in the raw metadata\n metadataTags.raw ??= {};\n metadataTags.raw['vendor'] ??= vendorString;\n }\n const listLength = commentView.getUint32(commentPos, true);\n commentPos += 4;\n // Loop over all metadata tags\n for (let i = 0; i < listLength; i++) {\n const stringLength = commentView.getUint32(commentPos, true);\n commentPos += 4;\n const string = textDecoder.decode(bytes.subarray(commentPos, commentPos + stringLength));\n commentPos += stringLength;\n const separatorIndex = string.indexOf('=');\n if (separatorIndex === -1) {\n continue;\n }\n const key = string.slice(0, separatorIndex).toUpperCase();\n const value = string.slice(separatorIndex + 1);\n metadataTags.raw ??= {};\n metadataTags.raw[key] ??= value;\n switch (key) {\n case 'TITLE':\n {\n metadataTags.title ??= value;\n }\n ;\n break;\n case 'DESCRIPTION':\n {\n metadataTags.description ??= value;\n }\n ;\n break;\n case 'ARTIST':\n {\n metadataTags.artist ??= value;\n }\n ;\n break;\n case 'ALBUM':\n {\n metadataTags.album ??= value;\n }\n ;\n break;\n case 'ALBUMARTIST':\n {\n metadataTags.albumArtist ??= 
value;\n }\n ;\n break;\n case 'COMMENT':\n {\n metadataTags.comment ??= value;\n }\n ;\n break;\n case 'LYRICS':\n {\n metadataTags.lyrics ??= value;\n }\n ;\n break;\n case 'TRACKNUMBER':\n {\n const parts = value.split('/');\n const trackNum = Number.parseInt(parts[0], 10);\n const tracksTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(trackNum) && trackNum > 0) {\n metadataTags.trackNumber ??= trackNum;\n }\n if (tracksTotal && Number.isInteger(tracksTotal) && tracksTotal > 0) {\n metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n ;\n break;\n case 'TRACKTOTAL':\n {\n const tracksTotal = Number.parseInt(value, 10);\n if (Number.isInteger(tracksTotal) && tracksTotal > 0) {\n metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n ;\n break;\n case 'DISCNUMBER':\n {\n const parts = value.split('/');\n const discNum = Number.parseInt(parts[0], 10);\n const discsTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(discNum) && discNum > 0) {\n metadataTags.discNumber ??= discNum;\n }\n if (discsTotal && Number.isInteger(discsTotal) && discsTotal > 0) {\n metadataTags.discsTotal ??= discsTotal;\n }\n }\n ;\n break;\n case 'DISCTOTAL':\n {\n const discsTotal = Number.parseInt(value, 10);\n if (Number.isInteger(discsTotal) && discsTotal > 0) {\n metadataTags.discsTotal ??= discsTotal;\n }\n }\n ;\n break;\n case 'DATE':\n {\n const date = new Date(value);\n if (!Number.isNaN(date.getTime())) {\n metadataTags.date ??= date;\n }\n }\n ;\n break;\n case 'GENRE':\n {\n metadataTags.genre ??= value;\n }\n ;\n break;\n case 'METADATA_BLOCK_PICTURE':\n {\n // https://datatracker.ietf.org/doc/rfc9639/ Section 8.8\n const decoded = base64ToBytes(value);\n const view = toDataView(decoded);\n const pictureType = view.getUint32(0, false);\n const mediaTypeLength = view.getUint32(4, false);\n const mediaType = String.fromCharCode(...decoded.subarray(8, 8 + mediaTypeLength)); // ASCII\n const descriptionLength = view.getUint32(8 + 
mediaTypeLength, false);\n const description = textDecoder.decode(decoded.subarray(12 + mediaTypeLength, 12 + mediaTypeLength + descriptionLength));\n const dataLength = view.getUint32(mediaTypeLength + descriptionLength + 28);\n const data = decoded.subarray(mediaTypeLength + descriptionLength + 32, mediaTypeLength + descriptionLength + 32 + dataLength);\n metadataTags.images ??= [];\n metadataTags.images.push({\n data,\n mimeType: mediaType,\n kind: pictureType === 3 ? 'coverFront' : pictureType === 4 ? 'coverBack' : 'unknown',\n name: undefined,\n description: description || undefined,\n });\n }\n ;\n break;\n }\n }\n};\nexport const createVorbisComments = (headerBytes, tags, writeImages) => {\n // https://datatracker.ietf.org/doc/html/rfc7845#section-5.2\n const commentHeaderParts = [\n headerBytes,\n ];\n const vendorString = 'Mediabunny';\n const encodedVendorString = textEncoder.encode(vendorString);\n let currentBuffer = new Uint8Array(4 + encodedVendorString.length);\n let currentView = new DataView(currentBuffer.buffer);\n currentView.setUint32(0, encodedVendorString.length, true);\n currentBuffer.set(encodedVendorString, 4);\n commentHeaderParts.push(currentBuffer);\n const writtenTags = new Set();\n const addCommentTag = (key, value) => {\n const joined = `${key}=${value}`;\n const encoded = textEncoder.encode(joined);\n currentBuffer = new Uint8Array(4 + encoded.length);\n currentView = new DataView(currentBuffer.buffer);\n currentView.setUint32(0, encoded.length, true);\n currentBuffer.set(encoded, 4);\n commentHeaderParts.push(currentBuffer);\n writtenTags.add(key);\n };\n for (const { key, value } of keyValueIterator(tags)) {\n switch (key) {\n case 'title':\n {\n addCommentTag('TITLE', value);\n }\n ;\n break;\n case 'description':\n {\n addCommentTag('DESCRIPTION', value);\n }\n ;\n break;\n case 'artist':\n {\n addCommentTag('ARTIST', value);\n }\n ;\n break;\n case 'album':\n {\n addCommentTag('ALBUM', value);\n }\n ;\n break;\n case 
'albumArtist':\n {\n addCommentTag('ALBUMARTIST', value);\n }\n ;\n break;\n case 'genre':\n {\n addCommentTag('GENRE', value);\n }\n ;\n break;\n case 'date':\n {\n const rawVersion = tags.raw?.['DATE'] ?? tags.raw?.['date'];\n if (rawVersion && typeof rawVersion === 'string') {\n addCommentTag('DATE', rawVersion);\n }\n else {\n addCommentTag('DATE', value.toISOString().slice(0, 10));\n }\n }\n ;\n break;\n case 'comment':\n {\n addCommentTag('COMMENT', value);\n }\n ;\n break;\n case 'lyrics':\n {\n addCommentTag('LYRICS', value);\n }\n ;\n break;\n case 'trackNumber':\n {\n addCommentTag('TRACKNUMBER', value.toString());\n }\n ;\n break;\n case 'tracksTotal':\n {\n addCommentTag('TRACKTOTAL', value.toString());\n }\n ;\n break;\n case 'discNumber':\n {\n addCommentTag('DISCNUMBER', value.toString());\n }\n ;\n break;\n case 'discsTotal':\n {\n addCommentTag('DISCTOTAL', value.toString());\n }\n ;\n break;\n case 'images':\n {\n // For example, in .flac, we put the pictures in a different section,\n // not in the Vorbis comment header.\n if (!writeImages) {\n break;\n }\n for (const image of value) {\n // https://datatracker.ietf.org/doc/rfc9639/ Section 8.8\n const pictureType = image.kind === 'coverFront' ? 3 : image.kind === 'coverBack' ? 4 : 0;\n const encodedMediaType = new Uint8Array(image.mimeType.length);\n for (let i = 0; i < image.mimeType.length; i++) {\n encodedMediaType[i] = image.mimeType.charCodeAt(i);\n }\n const encodedDescription = textEncoder.encode(image.description ?? 
'');\n const buffer = new Uint8Array(4 // Picture type\n + 4 // MIME type length\n + encodedMediaType.length // MIME type\n + 4 // Description length\n + encodedDescription.length // Description\n + 16 // Width, height, color depth, number of colors\n + 4 // Picture data length\n + image.data.length);\n const view = toDataView(buffer);\n view.setUint32(0, pictureType, false);\n view.setUint32(4, encodedMediaType.length, false);\n buffer.set(encodedMediaType, 8);\n view.setUint32(8 + encodedMediaType.length, encodedDescription.length, false);\n buffer.set(encodedDescription, 12 + encodedMediaType.length);\n // Skip a bunch of fields (width, height, color depth, number of colors)\n view.setUint32(28 + encodedMediaType.length + encodedDescription.length, image.data.length, false);\n buffer.set(image.data, 32 + encodedMediaType.length + encodedDescription.length);\n const encoded = bytesToBase64(buffer);\n addCommentTag('METADATA_BLOCK_PICTURE', encoded);\n }\n }\n ;\n break;\n case 'raw':\n {\n // Handled later\n }\n ;\n break;\n default: assertNever(key);\n }\n }\n if (tags.raw) {\n for (const key in tags.raw) {\n const value = tags.raw[key] ?? 
tags.raw[key.toLowerCase()];\n if (key === 'vendor' || value == null || writtenTags.has(key)) {\n continue;\n }\n if (typeof value === 'string') {\n addCommentTag(key, value);\n }\n }\n }\n const listLengthBuffer = new Uint8Array(4);\n toDataView(listLengthBuffer).setUint32(0, writtenTags.size, true);\n commentHeaderParts.splice(2, 0, listLengthBuffer); // Insert after the header and vendor section\n // Merge all comment header parts into a single buffer\n const commentHeaderLength = commentHeaderParts.reduce((a, b) => a + b.length, 0);\n const commentHeader = new Uint8Array(commentHeaderLength);\n let pos = 0;\n for (const part of commentHeaderParts) {\n commentHeader.set(part, pos);\n pos += part.length;\n }\n return commentHeader;\n};\n// ============================================================================\n// AC-3 / E-AC-3 Parsing\n// Reference: ETSI TS 102 366 V1.4.1\n// ============================================================================\n/**\n * Channel counts indexed by acmod (Table 4.3).\n * Does NOT include LFE - add lfeon to get total channel count.\n */\nexport const AC3_ACMOD_CHANNEL_COUNTS = [2, 1, 2, 3, 3, 4, 4, 5];\n/**\n * Parse an AC-3 syncframe to extract BSI (Bit Stream Information) fields.\n * Section 4.3\n */\nexport const parseAc3SyncFrame = (data) => {\n if (data.length < 7) {\n return null;\n }\n // Check sync word (0x0B77)\n if (data[0] !== 0x0B || data[1] !== 0x77) {\n return null;\n }\n const bitstream = new Bitstream(data);\n bitstream.skipBits(16); // sync word\n bitstream.skipBits(16); // crc1\n const fscod = bitstream.readBits(2);\n if (fscod === 3) {\n return null; // Reserved, invalid\n }\n const frmsizecod = bitstream.readBits(6);\n const bsid = bitstream.readBits(5);\n // Verify this is AC-3\n if (bsid > 8) {\n return null;\n }\n const bsmod = bitstream.readBits(3);\n const acmod = bitstream.readBits(3);\n // Skip cmixlev (center downmix level) if three front channels are in use (L, C, R).\n if ((acmod & 0x1) !== 
0 && acmod !== 0x1) {\n bitstream.skipBits(2);\n }\n // Skip surmixlev (surround downmix level) if surround channels are in use.\n if ((acmod & 0x4) !== 0) {\n bitstream.skipBits(2);\n }\n // Skip dsurmod if stereo (acmod === 2)\n if (acmod === 0x2) {\n bitstream.skipBits(2);\n }\n const lfeon = bitstream.readBits(1);\n const bitRateCode = Math.floor(frmsizecod / 2);\n return { fscod, bsid, bsmod, acmod, lfeon, bitRateCode };\n};\n/**\n * AC-3 frame sizes in bytes, indexed by [3 * frmsizecod + fscod].\n * fscod: 0=48kHz, 1=44.1kHz, 2=32kHz\n * Values are 16-bit words * 2 (to convert to bytes).\n * Table 4.13\n */\nexport const AC3_FRAME_SIZES = [\n // frmsizecod, [48kHz, 44.1kHz, 32kHz] in bytes\n 64 * 2, 69 * 2, 96 * 2,\n 64 * 2, 70 * 2, 96 * 2,\n 80 * 2, 87 * 2, 120 * 2,\n 80 * 2, 88 * 2, 120 * 2,\n 96 * 2, 104 * 2, 144 * 2,\n 96 * 2, 105 * 2, 144 * 2,\n 112 * 2, 121 * 2, 168 * 2,\n 112 * 2, 122 * 2, 168 * 2,\n 128 * 2, 139 * 2, 192 * 2,\n 128 * 2, 140 * 2, 192 * 2,\n 160 * 2, 174 * 2, 240 * 2,\n 160 * 2, 175 * 2, 240 * 2,\n 192 * 2, 208 * 2, 288 * 2,\n 192 * 2, 209 * 2, 288 * 2,\n 224 * 2, 243 * 2, 336 * 2,\n 224 * 2, 244 * 2, 336 * 2,\n 256 * 2, 278 * 2, 384 * 2,\n 256 * 2, 279 * 2, 384 * 2,\n 320 * 2, 348 * 2, 480 * 2,\n 320 * 2, 349 * 2, 480 * 2,\n 384 * 2, 417 * 2, 576 * 2,\n 384 * 2, 418 * 2, 576 * 2,\n 448 * 2, 487 * 2, 672 * 2,\n 448 * 2, 488 * 2, 672 * 2,\n 512 * 2, 557 * 2, 768 * 2,\n 512 * 2, 558 * 2, 768 * 2,\n 640 * 2, 696 * 2, 960 * 2,\n 640 * 2, 697 * 2, 960 * 2,\n 768 * 2, 835 * 2, 1152 * 2,\n 768 * 2, 836 * 2, 1152 * 2,\n 896 * 2, 975 * 2, 1344 * 2,\n 896 * 2, 976 * 2, 1344 * 2,\n 1024 * 2, 1114 * 2, 1536 * 2,\n 1024 * 2, 1115 * 2, 1536 * 2,\n 1152 * 2, 1253 * 2, 1728 * 2,\n 1152 * 2, 1254 * 2, 1728 * 2,\n 1280 * 2, 1393 * 2, 1920 * 2,\n 1280 * 2, 1394 * 2, 1920 * 2,\n];\n/** Number of samples per AC-3 syncframe (always 1536) */\nexport const AC3_SAMPLES_PER_FRAME = 1536;\n/**\n * AC-3 registration_descriptor for MPEG-TS.\n * Section A.2.3\n 
*/\nexport const AC3_REGISTRATION_DESCRIPTOR = new Uint8Array([0x05, 0x04, 0x41, 0x43, 0x2d, 0x33]);\n/** E-AC-3 registration_descriptor for MPEG-TS/ */\nexport const EAC3_REGISTRATION_DESCRIPTOR = new Uint8Array([0x05, 0x04, 0x45, 0x41, 0x43, 0x33]);\n/** Number of audio blocks per syncframe, indexed by numblkscod */\nexport const EAC3_NUMBLKS_TABLE = [1, 2, 3, 6];\n/**\n * Parse an E-AC-3 syncframe to extract BSI fields.\n * Section E.1.2\n */\nexport const parseEac3SyncFrame = (data) => {\n if (data.length < 6) {\n return null;\n }\n // Check sync word (0x0B77)\n if (data[0] !== 0x0B || data[1] !== 0x77) {\n return null;\n }\n const bitstream = new Bitstream(data);\n bitstream.skipBits(16); // sync word\n const strmtyp = bitstream.readBits(2);\n bitstream.skipBits(3); // substreamid\n // Only parse independent substreams (strmtyp 0 or 2)\n if (strmtyp !== 0 && strmtyp !== 2) {\n return null;\n }\n const frmsiz = bitstream.readBits(11);\n const fscod = bitstream.readBits(2);\n let fscod2 = 0;\n let numblkscod;\n if (fscod === 3) {\n // fscod2 enables reduced sample rates (24/22.05/16 kHz) per ATSC A/52:2018\n fscod2 = bitstream.readBits(2);\n numblkscod = 3; // Implicitly 6 blocks when fscod=3\n }\n else {\n numblkscod = bitstream.readBits(2);\n }\n const acmod = bitstream.readBits(3);\n const lfeon = bitstream.readBits(1);\n const bsid = bitstream.readBits(5);\n // Verify this is E-AC-3\n if (bsid < 11 || bsid > 16) {\n return null;\n }\n // Calculate data rate: ((frmsiz + 1) * fs) / (numblks * 16)\n const numblks = EAC3_NUMBLKS_TABLE[numblkscod];\n let fs;\n if (fscod < 3) {\n fs = AC3_SAMPLE_RATES[fscod] / 1000;\n }\n else {\n fs = EAC3_REDUCED_SAMPLE_RATES[fscod2] / 1000;\n }\n const dataRate = Math.round(((frmsiz + 1) * fs) / (numblks * 16));\n // These fields require parsing beyond the first frame.\n // Defaults are correct for almost all content.\n const bsmod = 0;\n const numDepSub = 0;\n const chanLoc = 0;\n const substream = {\n fscod,\n fscod2,\n 
bsid,\n bsmod,\n acmod,\n lfeon,\n numDepSub,\n chanLoc,\n };\n return {\n dataRate,\n substreams: [substream],\n };\n};\n/**\n * Parse a dec3 box to extract E-AC-3 parameters.\n * Section F.6\n */\nexport const parseEac3Config = (data) => {\n if (data.length < 2) {\n return null;\n }\n const bitstream = new Bitstream(data);\n const dataRate = bitstream.readBits(13);\n const numIndSub = bitstream.readBits(3);\n const substreams = [];\n for (let i = 0; i <= numIndSub; i++) {\n // Check we have enough data for this substream\n // Each substream needs at least 24 bits (3 bytes) without dependent subs\n if (Math.ceil(bitstream.pos / 8) + 3 > data.length) {\n break;\n }\n const fscod = bitstream.readBits(2);\n const bsid = bitstream.readBits(5);\n bitstream.skipBits(1); // reserved\n bitstream.skipBits(1); // asvc\n const bsmod = bitstream.readBits(3);\n const acmod = bitstream.readBits(3);\n const lfeon = bitstream.readBits(1);\n bitstream.skipBits(3); // reserved\n const numDepSub = bitstream.readBits(4);\n let chanLoc = 0;\n if (numDepSub > 0) {\n chanLoc = bitstream.readBits(9);\n }\n else {\n bitstream.skipBits(1); // reserved\n }\n substreams.push({\n fscod,\n fscod2: null,\n bsid,\n bsmod,\n acmod,\n lfeon,\n numDepSub,\n chanLoc,\n });\n }\n if (substreams.length === 0) {\n return null;\n }\n return { dataRate, substreams };\n};\n/**\n * Get sample rate from E-AC-3 config.\n * See ATSC A/52:2018 for handling fscod2.\n */\nexport const getEac3SampleRate = (config) => {\n const sub = config.substreams[0];\n assert(sub);\n if (sub.fscod < 3) {\n return AC3_SAMPLE_RATES[sub.fscod];\n }\n else if (sub.fscod2 !== null && sub.fscod2 < 3) {\n return EAC3_REDUCED_SAMPLE_RATES[sub.fscod2];\n }\n return null;\n};\n/**\n * Get channel count from E-AC-3 config (first independent substream only).\n */\nexport const getEac3ChannelCount = (config) => {\n const sub = config.substreams[0];\n assert(sub);\n let channels = AC3_ACMOD_CHANNEL_COUNTS[sub.acmod] + sub.lfeon;\n // Add 
channels from dependent substreams\n if (sub.numDepSub > 0) {\n const CHAN_LOC_COUNTS = [2, 2, 1, 1, 2, 2, 2, 1, 1];\n for (let bit = 0; bit < 9; bit++) {\n if (sub.chanLoc & (1 << (8 - bit))) {\n channels += CHAN_LOC_COUNTS[bit];\n }\n }\n }\n return channels;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { parseAacAudioSpecificConfig } from '../shared/aac-misc.js';\nimport { COLOR_PRIMARIES_MAP, MATRIX_COEFFICIENTS_MAP, TRANSFER_CHARACTERISTICS_MAP, assert, bytesToHexString, isAllowSharedBufferSource, last, reverseBitsU32, toDataView, } from './misc.js';\n/**\n * List of known video codecs, ordered by encoding preference.\n * @group Codecs\n * @public\n */\nexport const VIDEO_CODECS = [\n 'avc',\n 'hevc',\n 'vp9',\n 'av1',\n 'vp8',\n];\n/**\n * List of known PCM (uncompressed) audio codecs, ordered by encoding preference.\n * @group Codecs\n * @public\n */\nexport const PCM_AUDIO_CODECS = [\n 'pcm-s16', // We don't prefix 'le' so we're compatible with the WebCodecs-registered PCM codec strings\n 'pcm-s16be',\n 'pcm-s24',\n 'pcm-s24be',\n 'pcm-s32',\n 'pcm-s32be',\n 'pcm-f32',\n 'pcm-f32be',\n 'pcm-f64',\n 'pcm-f64be',\n 'pcm-u8',\n 'pcm-s8',\n 'ulaw',\n 'alaw',\n];\n/**\n * List of known compressed audio codecs, ordered by encoding preference.\n * @group Codecs\n * @public\n */\nexport const NON_PCM_AUDIO_CODECS = [\n 'aac',\n 'opus',\n 'mp3',\n 'vorbis',\n 'flac',\n 'ac3',\n 'eac3',\n];\n/**\n * List of known audio codecs, ordered by encoding preference.\n * @group Codecs\n * @public\n */\nexport const AUDIO_CODECS = [\n ...NON_PCM_AUDIO_CODECS,\n ...PCM_AUDIO_CODECS,\n];\n/**\n * List of known subtitle codecs, ordered by encoding preference.\n * @group Codecs\n * @public\n */\nexport const SUBTITLE_CODECS = [\n 
'webvtt',\n]; // TODO add the rest\n// https://en.wikipedia.org/wiki/Advanced_Video_Coding\nexport const AVC_LEVEL_TABLE = [\n { maxMacroblocks: 99, maxBitrate: 64000, maxDpbMbs: 396, level: 0x0A }, // Level 1\n { maxMacroblocks: 396, maxBitrate: 192000, maxDpbMbs: 900, level: 0x0B }, // Level 1.1\n { maxMacroblocks: 396, maxBitrate: 384000, maxDpbMbs: 2376, level: 0x0C }, // Level 1.2\n { maxMacroblocks: 396, maxBitrate: 768000, maxDpbMbs: 2376, level: 0x0D }, // Level 1.3\n { maxMacroblocks: 396, maxBitrate: 2000000, maxDpbMbs: 2376, level: 0x14 }, // Level 2\n { maxMacroblocks: 792, maxBitrate: 4000000, maxDpbMbs: 4752, level: 0x15 }, // Level 2.1\n { maxMacroblocks: 1620, maxBitrate: 4000000, maxDpbMbs: 8100, level: 0x16 }, // Level 2.2\n { maxMacroblocks: 1620, maxBitrate: 10000000, maxDpbMbs: 8100, level: 0x1E }, // Level 3\n { maxMacroblocks: 3600, maxBitrate: 14000000, maxDpbMbs: 18000, level: 0x1F }, // Level 3.1\n { maxMacroblocks: 5120, maxBitrate: 20000000, maxDpbMbs: 20480, level: 0x20 }, // Level 3.2\n { maxMacroblocks: 8192, maxBitrate: 20000000, maxDpbMbs: 32768, level: 0x28 }, // Level 4\n { maxMacroblocks: 8192, maxBitrate: 50000000, maxDpbMbs: 32768, level: 0x29 }, // Level 4.1\n { maxMacroblocks: 8704, maxBitrate: 50000000, maxDpbMbs: 34816, level: 0x2A }, // Level 4.2\n { maxMacroblocks: 22080, maxBitrate: 135000000, maxDpbMbs: 110400, level: 0x32 }, // Level 5\n { maxMacroblocks: 36864, maxBitrate: 240000000, maxDpbMbs: 184320, level: 0x33 }, // Level 5.1\n { maxMacroblocks: 36864, maxBitrate: 240000000, maxDpbMbs: 184320, level: 0x34 }, // Level 5.2\n { maxMacroblocks: 139264, maxBitrate: 240000000, maxDpbMbs: 696320, level: 0x3C }, // Level 6\n { maxMacroblocks: 139264, maxBitrate: 480000000, maxDpbMbs: 696320, level: 0x3D }, // Level 6.1\n { maxMacroblocks: 139264, maxBitrate: 800000000, maxDpbMbs: 696320, level: 0x3E }, // Level 6.2\n];\n// https://en.wikipedia.org/wiki/High_Efficiency_Video_Coding\nconst HEVC_LEVEL_TABLE = [\n { 
maxPictureSize: 36864, maxBitrate: 128000, tier: 'L', level: 30 }, // Level 1 (Low Tier)\n { maxPictureSize: 122880, maxBitrate: 1500000, tier: 'L', level: 60 }, // Level 2 (Low Tier)\n { maxPictureSize: 245760, maxBitrate: 3000000, tier: 'L', level: 63 }, // Level 2.1 (Low Tier)\n { maxPictureSize: 552960, maxBitrate: 6000000, tier: 'L', level: 90 }, // Level 3 (Low Tier)\n { maxPictureSize: 983040, maxBitrate: 10000000, tier: 'L', level: 93 }, // Level 3.1 (Low Tier)\n { maxPictureSize: 2228224, maxBitrate: 12000000, tier: 'L', level: 120 }, // Level 4 (Low Tier)\n { maxPictureSize: 2228224, maxBitrate: 30000000, tier: 'H', level: 120 }, // Level 4 (High Tier)\n { maxPictureSize: 2228224, maxBitrate: 20000000, tier: 'L', level: 123 }, // Level 4.1 (Low Tier)\n { maxPictureSize: 2228224, maxBitrate: 50000000, tier: 'H', level: 123 }, // Level 4.1 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 25000000, tier: 'L', level: 150 }, // Level 5 (Low Tier)\n { maxPictureSize: 8912896, maxBitrate: 100000000, tier: 'H', level: 150 }, // Level 5 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 40000000, tier: 'L', level: 153 }, // Level 5.1 (Low Tier)\n { maxPictureSize: 8912896, maxBitrate: 160000000, tier: 'H', level: 153 }, // Level 5.1 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 60000000, tier: 'L', level: 156 }, // Level 5.2 (Low Tier)\n { maxPictureSize: 8912896, maxBitrate: 240000000, tier: 'H', level: 156 }, // Level 5.2 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 60000000, tier: 'L', level: 180 }, // Level 6 (Low Tier)\n { maxPictureSize: 35651584, maxBitrate: 240000000, tier: 'H', level: 180 }, // Level 6 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 120000000, tier: 'L', level: 183 }, // Level 6.1 (Low Tier)\n { maxPictureSize: 35651584, maxBitrate: 480000000, tier: 'H', level: 183 }, // Level 6.1 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 240000000, tier: 'L', level: 186 }, // Level 6.2 (Low Tier)\n { 
maxPictureSize: 35651584, maxBitrate: 800000000, tier: 'H', level: 186 }, // Level 6.2 (High Tier)\n];\n// https://en.wikipedia.org/wiki/VP9\nexport const VP9_LEVEL_TABLE = [\n { maxPictureSize: 36864, maxBitrate: 200000, level: 10 }, // Level 1\n { maxPictureSize: 73728, maxBitrate: 800000, level: 11 }, // Level 1.1\n { maxPictureSize: 122880, maxBitrate: 1800000, level: 20 }, // Level 2\n { maxPictureSize: 245760, maxBitrate: 3600000, level: 21 }, // Level 2.1\n { maxPictureSize: 552960, maxBitrate: 7200000, level: 30 }, // Level 3\n { maxPictureSize: 983040, maxBitrate: 12000000, level: 31 }, // Level 3.1\n { maxPictureSize: 2228224, maxBitrate: 18000000, level: 40 }, // Level 4\n { maxPictureSize: 2228224, maxBitrate: 30000000, level: 41 }, // Level 4.1\n { maxPictureSize: 8912896, maxBitrate: 60000000, level: 50 }, // Level 5\n { maxPictureSize: 8912896, maxBitrate: 120000000, level: 51 }, // Level 5.1\n { maxPictureSize: 8912896, maxBitrate: 180000000, level: 52 }, // Level 5.2\n { maxPictureSize: 35651584, maxBitrate: 180000000, level: 60 }, // Level 6\n { maxPictureSize: 35651584, maxBitrate: 240000000, level: 61 }, // Level 6.1\n { maxPictureSize: 35651584, maxBitrate: 480000000, level: 62 }, // Level 6.2\n];\n// https://en.wikipedia.org/wiki/AV1\nconst AV1_LEVEL_TABLE = [\n { maxPictureSize: 147456, maxBitrate: 1500000, tier: 'M', level: 0 }, // Level 2.0 (Main Tier)\n { maxPictureSize: 278784, maxBitrate: 3000000, tier: 'M', level: 1 }, // Level 2.1 (Main Tier)\n { maxPictureSize: 665856, maxBitrate: 6000000, tier: 'M', level: 4 }, // Level 3.0 (Main Tier)\n { maxPictureSize: 1065024, maxBitrate: 10000000, tier: 'M', level: 5 }, // Level 3.1 (Main Tier)\n { maxPictureSize: 2359296, maxBitrate: 12000000, tier: 'M', level: 8 }, // Level 4.0 (Main Tier)\n { maxPictureSize: 2359296, maxBitrate: 30000000, tier: 'H', level: 8 }, // Level 4.0 (High Tier)\n { maxPictureSize: 2359296, maxBitrate: 20000000, tier: 'M', level: 9 }, // Level 4.1 (Main Tier)\n { 
maxPictureSize: 2359296, maxBitrate: 50000000, tier: 'H', level: 9 }, // Level 4.1 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 30000000, tier: 'M', level: 12 }, // Level 5.0 (Main Tier)\n { maxPictureSize: 8912896, maxBitrate: 100000000, tier: 'H', level: 12 }, // Level 5.0 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 40000000, tier: 'M', level: 13 }, // Level 5.1 (Main Tier)\n { maxPictureSize: 8912896, maxBitrate: 160000000, tier: 'H', level: 13 }, // Level 5.1 (High Tier)\n { maxPictureSize: 8912896, maxBitrate: 60000000, tier: 'M', level: 14 }, // Level 5.2 (Main Tier)\n { maxPictureSize: 8912896, maxBitrate: 240000000, tier: 'H', level: 14 }, // Level 5.2 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 60000000, tier: 'M', level: 15 }, // Level 5.3 (Main Tier)\n { maxPictureSize: 35651584, maxBitrate: 240000000, tier: 'H', level: 15 }, // Level 5.3 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 60000000, tier: 'M', level: 16 }, // Level 6.0 (Main Tier)\n { maxPictureSize: 35651584, maxBitrate: 240000000, tier: 'H', level: 16 }, // Level 6.0 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 100000000, tier: 'M', level: 17 }, // Level 6.1 (Main Tier)\n { maxPictureSize: 35651584, maxBitrate: 480000000, tier: 'H', level: 17 }, // Level 6.1 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 160000000, tier: 'M', level: 18 }, // Level 6.2 (Main Tier)\n { maxPictureSize: 35651584, maxBitrate: 800000000, tier: 'H', level: 18 }, // Level 6.2 (High Tier)\n { maxPictureSize: 35651584, maxBitrate: 160000000, tier: 'M', level: 19 }, // Level 6.3 (Main Tier)\n { maxPictureSize: 35651584, maxBitrate: 800000000, tier: 'H', level: 19 }, // Level 6.3 (High Tier)\n];\nconst VP9_DEFAULT_SUFFIX = '.01.01.01.01.00';\nconst AV1_DEFAULT_SUFFIX = '.0.110.01.01.01.0';\nexport const buildVideoCodecString = (codec, width, height, bitrate) => {\n if (codec === 'avc') {\n const profileIndication = 0x64; // High Profile\n const totalMacroblocks = 
Math.ceil(width / 16) * Math.ceil(height / 16);\n // Determine the level based on the table\n const levelInfo = AVC_LEVEL_TABLE.find(level => totalMacroblocks <= level.maxMacroblocks && bitrate <= level.maxBitrate) ?? last(AVC_LEVEL_TABLE);\n const levelIndication = levelInfo ? levelInfo.level : 0;\n const hexProfileIndication = profileIndication.toString(16).padStart(2, '0');\n const hexProfileCompatibility = '00';\n const hexLevelIndication = levelIndication.toString(16).padStart(2, '0');\n return `avc1.${hexProfileIndication}${hexProfileCompatibility}${hexLevelIndication}`;\n }\n else if (codec === 'hevc') {\n const profilePrefix = ''; // Profile space 0\n const profileIdc = 1; // Main Profile\n const compatibilityFlags = '6'; // Taken from the example in ISO 14496-15\n const pictureSize = width * height;\n const levelInfo = HEVC_LEVEL_TABLE.find(level => pictureSize <= level.maxPictureSize && bitrate <= level.maxBitrate) ?? last(HEVC_LEVEL_TABLE);\n const constraintFlags = 'B0'; // Progressive source flag\n return 'hev1.'\n + `${profilePrefix}${profileIdc}.`\n + `${compatibilityFlags}.`\n + `${levelInfo.tier}${levelInfo.level}.`\n + `${constraintFlags}`;\n }\n else if (codec === 'vp8') {\n return 'vp8'; // Easy, this one\n }\n else if (codec === 'vp9') {\n const profile = '00'; // Profile 0\n const pictureSize = width * height;\n const levelInfo = VP9_LEVEL_TABLE.find(level => pictureSize <= level.maxPictureSize && bitrate <= level.maxBitrate) ?? last(VP9_LEVEL_TABLE);\n const bitDepth = '08'; // 8-bit\n return `vp09.${profile}.${levelInfo.level.toString().padStart(2, '0')}.${bitDepth}`;\n }\n else if (codec === 'av1') {\n const profile = 0; // Main Profile, single digit\n const pictureSize = width * height;\n const levelInfo = AV1_LEVEL_TABLE.find(level => pictureSize <= level.maxPictureSize && bitrate <= level.maxBitrate) ?? 
last(AV1_LEVEL_TABLE);\n const level = levelInfo.level.toString().padStart(2, '0');\n const bitDepth = '08'; // 8-bit\n return `av01.${profile}.${level}${levelInfo.tier}.${bitDepth}`;\n }\n // eslint-disable-next-line @typescript-eslint/restrict-template-expressions\n throw new TypeError(`Unhandled codec '${codec}'.`);\n};\nexport const generateVp9CodecConfigurationFromCodecString = (codecString) => {\n // Reference: https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate\n const parts = codecString.split('.'); // We can derive the required values from the codec string\n const profile = Number(parts[1]);\n const level = Number(parts[2]);\n const bitDepth = Number(parts[3]);\n const chromaSubsampling = parts[4] ? Number(parts[4]) : 1;\n return [\n 1, 1, profile,\n 2, 1, level,\n 3, 1, bitDepth,\n 4, 1, chromaSubsampling,\n ];\n};\nexport const generateAv1CodecConfigurationFromCodecString = (codecString) => {\n // Reference: https://aomediacodec.github.io/av1-isobmff/\n const parts = codecString.split('.'); // We can derive the required values from the codec string\n const marker = 1;\n const version = 1;\n const firstByte = (marker << 7) + version;\n const profile = Number(parts[1]);\n const levelAndTier = parts[2];\n const level = Number(levelAndTier.slice(0, -1));\n const secondByte = (profile << 5) + level;\n const tier = levelAndTier.slice(-1) === 'H' ? 1 : 0;\n const bitDepth = Number(parts[3]);\n const highBitDepth = bitDepth === 8 ? 0 : 1;\n const twelveBit = 0;\n const monochrome = parts[4] ? Number(parts[4]) : 0;\n const chromaSubsamplingX = parts[5] ? Number(parts[5][0]) : 1;\n const chromaSubsamplingY = parts[5] ? Number(parts[5][1]) : 1;\n const chromaSamplePosition = parts[5] ? 
Number(parts[5][2]) : 0; // CSP_UNKNOWN\n const thirdByte = (tier << 7)\n + (highBitDepth << 6)\n + (twelveBit << 5)\n + (monochrome << 4)\n + (chromaSubsamplingX << 3)\n + (chromaSubsamplingY << 2)\n + chromaSamplePosition;\n const initialPresentationDelayPresent = 0; // Should be fine\n const fourthByte = initialPresentationDelayPresent;\n return [firstByte, secondByte, thirdByte, fourthByte];\n};\nexport const extractVideoCodecString = (trackInfo) => {\n const { codec, codecDescription, colorSpace, avcCodecInfo, hevcCodecInfo, vp9CodecInfo, av1CodecInfo } = trackInfo;\n if (codec === 'avc') {\n assert(trackInfo.avcType !== null);\n if (avcCodecInfo) {\n const bytes = new Uint8Array([\n avcCodecInfo.avcProfileIndication,\n avcCodecInfo.profileCompatibility,\n avcCodecInfo.avcLevelIndication,\n ]);\n return `avc${trackInfo.avcType}.${bytesToHexString(bytes)}`;\n }\n if (!codecDescription || codecDescription.byteLength < 4) {\n throw new TypeError('AVC decoder description is not provided or is not at least 4 bytes long.');\n }\n return `avc${trackInfo.avcType}.${bytesToHexString(codecDescription.subarray(1, 4))}`;\n }\n else if (codec === 'hevc') {\n let generalProfileSpace;\n let generalProfileIdc;\n let compatibilityFlags;\n let generalTierFlag;\n let generalLevelIdc;\n let constraintFlags;\n if (hevcCodecInfo) {\n generalProfileSpace = hevcCodecInfo.generalProfileSpace;\n generalProfileIdc = hevcCodecInfo.generalProfileIdc;\n compatibilityFlags = reverseBitsU32(hevcCodecInfo.generalProfileCompatibilityFlags);\n generalTierFlag = hevcCodecInfo.generalTierFlag;\n generalLevelIdc = hevcCodecInfo.generalLevelIdc;\n constraintFlags = [...hevcCodecInfo.generalConstraintIndicatorFlags];\n }\n else {\n if (!codecDescription || codecDescription.byteLength < 23) {\n throw new TypeError('HEVC decoder description is not provided or is not at least 23 bytes long.');\n }\n const view = toDataView(codecDescription);\n const profileByte = view.getUint8(1);\n generalProfileSpace 
= (profileByte >> 6) & 0x03;\n generalProfileIdc = profileByte & 0x1F;\n compatibilityFlags = reverseBitsU32(view.getUint32(2));\n generalTierFlag = (profileByte >> 5) & 0x01;\n generalLevelIdc = view.getUint8(12);\n constraintFlags = [];\n for (let i = 0; i < 6; i++) {\n constraintFlags.push(view.getUint8(6 + i));\n }\n }\n let codecString = 'hev1.';\n codecString += ['', 'A', 'B', 'C'][generalProfileSpace] + generalProfileIdc;\n codecString += '.';\n codecString += compatibilityFlags.toString(16).toUpperCase();\n codecString += '.';\n codecString += generalTierFlag === 0 ? 'L' : 'H';\n codecString += generalLevelIdc;\n while (constraintFlags.length > 0 && constraintFlags[constraintFlags.length - 1] === 0) {\n constraintFlags.pop();\n }\n if (constraintFlags.length > 0) {\n codecString += '.';\n codecString += constraintFlags.map(x => x.toString(16).toUpperCase()).join('.');\n }\n return codecString;\n }\n else if (codec === 'vp8') {\n return 'vp8'; // Easy, this one\n }\n else if (codec === 'vp9') {\n if (!vp9CodecInfo) {\n // Calculate level based on dimensions\n const pictureSize = trackInfo.width * trackInfo.height;\n let level = last(VP9_LEVEL_TABLE).level; // Default to highest level\n for (const entry of VP9_LEVEL_TABLE) {\n if (pictureSize <= entry.maxPictureSize) {\n level = entry.level;\n break;\n }\n }\n // We don't really know better, so let's return a general-purpose, common codec string and hope for the best\n return `vp09.00.${level.toString().padStart(2, '0')}.08`;\n }\n const profile = vp9CodecInfo.profile.toString().padStart(2, '0');\n const level = vp9CodecInfo.level.toString().padStart(2, '0');\n const bitDepth = vp9CodecInfo.bitDepth.toString().padStart(2, '0');\n const chromaSubsampling = vp9CodecInfo.chromaSubsampling.toString().padStart(2, '0');\n const colourPrimaries = vp9CodecInfo.colourPrimaries.toString().padStart(2, '0');\n const transferCharacteristics = vp9CodecInfo.transferCharacteristics.toString().padStart(2, '0');\n const 
matrixCoefficients = vp9CodecInfo.matrixCoefficients.toString().padStart(2, '0');\n const videoFullRangeFlag = vp9CodecInfo.videoFullRangeFlag.toString().padStart(2, '0');\n let string = `vp09.${profile}.${level}.${bitDepth}.${chromaSubsampling}`;\n string += `.${colourPrimaries}.${transferCharacteristics}.${matrixCoefficients}.${videoFullRangeFlag}`;\n if (string.endsWith(VP9_DEFAULT_SUFFIX)) {\n string = string.slice(0, -VP9_DEFAULT_SUFFIX.length);\n }\n return string;\n }\n else if (codec === 'av1') {\n if (!av1CodecInfo) {\n // Calculate level based on dimensions\n const pictureSize = trackInfo.width * trackInfo.height;\n let level = last(VP9_LEVEL_TABLE).level; // Default to highest level\n for (const entry of VP9_LEVEL_TABLE) {\n if (pictureSize <= entry.maxPictureSize) {\n level = entry.level;\n break;\n }\n }\n // We don't really know better, so let's return a general-purpose, common codec string and hope for the best\n return `av01.0.${level.toString().padStart(2, '0')}M.08`;\n }\n // https://aomediacodec.github.io/av1-isobmff/#codecsparam\n const profile = av1CodecInfo.profile; // Single digit\n const level = av1CodecInfo.level.toString().padStart(2, '0');\n const tier = av1CodecInfo.tier ? 'H' : 'M';\n const bitDepth = av1CodecInfo.bitDepth.toString().padStart(2, '0');\n const monochrome = av1CodecInfo.monochrome ? '1' : '0';\n const chromaSubsampling = 100 * av1CodecInfo.chromaSubsamplingX\n + 10 * av1CodecInfo.chromaSubsamplingY\n + 1 * (av1CodecInfo.chromaSubsamplingX && av1CodecInfo.chromaSubsamplingY\n ? av1CodecInfo.chromaSamplePosition\n : 0);\n // The defaults are 1 (ITU-R BT.709)\n const colorPrimaries = colorSpace?.primaries ? COLOR_PRIMARIES_MAP[colorSpace.primaries] : 1;\n const transferCharacteristics = colorSpace?.transfer ? TRANSFER_CHARACTERISTICS_MAP[colorSpace.transfer] : 1;\n const matrixCoefficients = colorSpace?.matrix ? MATRIX_COEFFICIENTS_MAP[colorSpace.matrix] : 1;\n const videoFullRangeFlag = colorSpace?.fullRange ? 
1 : 0;\n let string = `av01.${profile}.${level}${tier}.${bitDepth}`;\n string += `.${monochrome}.${chromaSubsampling.toString().padStart(3, '0')}`;\n string += `.${colorPrimaries.toString().padStart(2, '0')}`;\n string += `.${transferCharacteristics.toString().padStart(2, '0')}`;\n string += `.${matrixCoefficients.toString().padStart(2, '0')}`;\n string += `.${videoFullRangeFlag}`;\n if (string.endsWith(AV1_DEFAULT_SUFFIX)) {\n string = string.slice(0, -AV1_DEFAULT_SUFFIX.length);\n }\n return string;\n }\n throw new TypeError(`Unhandled codec '${codec}'.`);\n};\nexport const buildAudioCodecString = (codec, numberOfChannels, sampleRate) => {\n if (codec === 'aac') {\n // If stereo or higher channels and lower sample rate, likely using HE-AAC v2 with PS\n if (numberOfChannels >= 2 && sampleRate <= 24000) {\n return 'mp4a.40.29'; // HE-AAC v2 (AAC LC + SBR + PS)\n }\n // If sample rate is low, likely using HE-AAC v1 with SBR\n if (sampleRate <= 24000) {\n return 'mp4a.40.5'; // HE-AAC v1 (AAC LC + SBR)\n }\n // Default to standard AAC-LC for higher sample rates\n return 'mp4a.40.2'; // AAC-LC\n }\n else if (codec === 'mp3') {\n return 'mp3';\n }\n else if (codec === 'opus') {\n return 'opus';\n }\n else if (codec === 'vorbis') {\n return 'vorbis';\n }\n else if (codec === 'flac') {\n return 'flac';\n }\n else if (codec === 'ac3') {\n return 'ac-3';\n }\n else if (codec === 'eac3') {\n return 'ec-3';\n }\n else if (PCM_AUDIO_CODECS.includes(codec)) {\n return codec;\n }\n throw new TypeError(`Unhandled codec '${codec}'.`);\n};\nexport const extractAudioCodecString = (trackInfo) => {\n const { codec, codecDescription, aacCodecInfo } = trackInfo;\n if (codec === 'aac') {\n if (!aacCodecInfo) {\n throw new TypeError('AAC codec info must be provided.');\n }\n if (aacCodecInfo.isMpeg2) {\n return 'mp4a.67';\n }\n else {\n let objectType;\n if (aacCodecInfo.objectType !== null) {\n objectType = aacCodecInfo.objectType;\n }\n else {\n const audioSpecificConfig = 
parseAacAudioSpecificConfig(codecDescription);\n objectType = audioSpecificConfig.objectType;\n }\n return `mp4a.40.${objectType}`;\n }\n }\n else if (codec === 'mp3') {\n return 'mp3';\n }\n else if (codec === 'opus') {\n return 'opus';\n }\n else if (codec === 'vorbis') {\n return 'vorbis';\n }\n else if (codec === 'flac') {\n return 'flac';\n }\n else if (codec === 'ac3') {\n return 'ac-3';\n }\n else if (codec === 'eac3') {\n return 'ec-3';\n }\n else if (codec && PCM_AUDIO_CODECS.includes(codec)) {\n return codec;\n }\n throw new TypeError(`Unhandled codec '${codec}'.`);\n};\nexport const OPUS_SAMPLE_RATE = 48_000;\nconst PCM_CODEC_REGEX = /^pcm-([usf])(\\d+)+(be)?$/;\nexport const parsePcmCodec = (codec) => {\n assert(PCM_AUDIO_CODECS.includes(codec));\n if (codec === 'ulaw') {\n return { dataType: 'ulaw', sampleSize: 1, littleEndian: true, silentValue: 255 };\n }\n else if (codec === 'alaw') {\n return { dataType: 'alaw', sampleSize: 1, littleEndian: true, silentValue: 213 };\n }\n const match = PCM_CODEC_REGEX.exec(codec);\n assert(match);\n let dataType;\n if (match[1] === 'u') {\n dataType = 'unsigned';\n }\n else if (match[1] === 's') {\n dataType = 'signed';\n }\n else {\n dataType = 'float';\n }\n const sampleSize = (Number(match[2]) / 8);\n const littleEndian = match[3] !== 'be';\n const silentValue = codec === 'pcm-u8' ? 
2 ** 7 : 0;\n return { dataType, sampleSize, littleEndian, silentValue };\n};\nexport const inferCodecFromCodecString = (codecString) => {\n // Video codecs\n if (codecString.startsWith('avc1') || codecString.startsWith('avc3')) {\n return 'avc';\n }\n else if (codecString.startsWith('hev1') || codecString.startsWith('hvc1')) {\n return 'hevc';\n }\n else if (codecString === 'vp8') {\n return 'vp8';\n }\n else if (codecString.startsWith('vp09')) {\n return 'vp9';\n }\n else if (codecString.startsWith('av01')) {\n return 'av1';\n }\n // Audio codecs\n if (codecString.startsWith('mp4a.40') || codecString === 'mp4a.67') {\n return 'aac';\n }\n else if (codecString === 'mp3'\n || codecString === 'mp4a.69'\n || codecString === 'mp4a.6B'\n || codecString === 'mp4a.6b') {\n return 'mp3';\n }\n else if (codecString === 'opus') {\n return 'opus';\n }\n else if (codecString === 'vorbis') {\n return 'vorbis';\n }\n else if (codecString === 'flac') {\n return 'flac';\n }\n else if (codecString === 'ac-3' || codecString === 'ac3') {\n return 'ac3';\n }\n else if (codecString === 'ec-3' || codecString === 'eac3') {\n return 'eac3';\n }\n else if (codecString === 'ulaw') {\n return 'ulaw';\n }\n else if (codecString === 'alaw') {\n return 'alaw';\n }\n else if (PCM_CODEC_REGEX.test(codecString)) {\n return codecString;\n }\n // Subtitle codecs\n if (codecString === 'webvtt') {\n return 'webvtt';\n }\n return null;\n};\nexport const getVideoEncoderConfigExtension = (codec) => {\n if (codec === 'avc') {\n return {\n avc: {\n format: 'avc', // Ensure the format is not Annex B\n },\n };\n }\n else if (codec === 'hevc') {\n return {\n hevc: {\n format: 'hevc', // Ensure the format is not Annex B\n },\n };\n }\n return {};\n};\nexport const getAudioEncoderConfigExtension = (codec) => {\n if (codec === 'aac') {\n return {\n aac: {\n format: 'aac', // Ensure the format is not ADTS\n },\n };\n }\n else if (codec === 'opus') {\n return {\n opus: {\n format: 'opus',\n },\n };\n }\n return 
{};\n};\nconst VALID_VIDEO_CODEC_STRING_PREFIXES = ['avc1', 'avc3', 'hev1', 'hvc1', 'vp8', 'vp09', 'av01'];\nconst AVC_CODEC_STRING_REGEX = /^(avc1|avc3)\\.[0-9a-fA-F]{6}$/;\nconst HEVC_CODEC_STRING_REGEX = /^(hev1|hvc1)\\.(?:[ABC]?\\d+)\\.[0-9a-fA-F]{1,8}\\.[LH]\\d+(?:\\.[0-9a-fA-F]{1,2}){0,6}$/;\nconst VP9_CODEC_STRING_REGEX = /^vp09(?:\\.\\d{2}){3}(?:(?:\\.\\d{2}){5})?$/;\nconst AV1_CODEC_STRING_REGEX = /^av01\\.\\d\\.\\d{2}[MH]\\.\\d{2}(?:\\.\\d\\.\\d{3}\\.\\d{2}\\.\\d{2}\\.\\d{2}\\.\\d)?$/;\nexport const validateVideoChunkMetadata = (metadata) => {\n if (!metadata) {\n throw new TypeError('Video chunk metadata must be provided.');\n }\n if (typeof metadata !== 'object') {\n throw new TypeError('Video chunk metadata must be an object.');\n }\n if (!metadata.decoderConfig) {\n throw new TypeError('Video chunk metadata must include a decoder configuration.');\n }\n if (typeof metadata.decoderConfig !== 'object') {\n throw new TypeError('Video chunk metadata decoder configuration must be an object.');\n }\n if (typeof metadata.decoderConfig.codec !== 'string') {\n throw new TypeError('Video chunk metadata decoder configuration must specify a codec string.');\n }\n if (!VALID_VIDEO_CODEC_STRING_PREFIXES.some(prefix => metadata.decoderConfig.codec.startsWith(prefix))) {\n throw new TypeError('Video chunk metadata decoder configuration codec string must be a valid video codec string as specified in'\n + ' the Mediabunny Codec Registry.');\n }\n if (!Number.isInteger(metadata.decoderConfig.codedWidth) || metadata.decoderConfig.codedWidth <= 0) {\n throw new TypeError('Video chunk metadata decoder configuration must specify a valid codedWidth (positive integer).');\n }\n if (!Number.isInteger(metadata.decoderConfig.codedHeight) || metadata.decoderConfig.codedHeight <= 0) {\n throw new TypeError('Video chunk metadata decoder configuration must specify a valid codedHeight (positive integer).');\n }\n if (metadata.decoderConfig.description !== undefined) {\n if 
(!isAllowSharedBufferSource(metadata.decoderConfig.description)) {\n throw new TypeError('Video chunk metadata decoder configuration description, when defined, must be an ArrayBuffer or an'\n + ' ArrayBuffer view.');\n }\n }\n if (metadata.decoderConfig.colorSpace !== undefined) {\n const { colorSpace } = metadata.decoderConfig;\n if (typeof colorSpace !== 'object') {\n throw new TypeError('Video chunk metadata decoder configuration colorSpace, when provided, must be an object.');\n }\n const primariesValues = Object.keys(COLOR_PRIMARIES_MAP);\n if (colorSpace.primaries != null && !primariesValues.includes(colorSpace.primaries)) {\n throw new TypeError(`Video chunk metadata decoder configuration colorSpace primaries, when defined, must be one of`\n + ` ${primariesValues.join(', ')}.`);\n }\n const transferValues = Object.keys(TRANSFER_CHARACTERISTICS_MAP);\n if (colorSpace.transfer != null && !transferValues.includes(colorSpace.transfer)) {\n throw new TypeError(`Video chunk metadata decoder configuration colorSpace transfer, when defined, must be one of`\n + ` ${transferValues.join(', ')}.`);\n }\n const matrixValues = Object.keys(MATRIX_COEFFICIENTS_MAP);\n if (colorSpace.matrix != null && !matrixValues.includes(colorSpace.matrix)) {\n throw new TypeError(`Video chunk metadata decoder configuration colorSpace matrix, when defined, must be one of`\n + ` ${matrixValues.join(', ')}.`);\n }\n if (colorSpace.fullRange != null && typeof colorSpace.fullRange !== 'boolean') {\n throw new TypeError('Video chunk metadata decoder configuration colorSpace fullRange, when defined, must be a boolean.');\n }\n }\n if (metadata.decoderConfig.codec.startsWith('avc1') || metadata.decoderConfig.codec.startsWith('avc3')) {\n // AVC-specific validation\n if (!AVC_CODEC_STRING_REGEX.test(metadata.decoderConfig.codec)) {\n throw new TypeError('Video chunk metadata decoder configuration codec string for AVC must be a valid AVC codec string as'\n + ' specified in Section 3.4 of RFC 
6381.');\n }\n // `description` may or may not be set, depending on if the format is AVCC or Annex B, so don't perform any\n // validation for it.\n // https://www.w3.org/TR/webcodecs-avc-codec-registration\n }\n else if (metadata.decoderConfig.codec.startsWith('hev1') || metadata.decoderConfig.codec.startsWith('hvc1')) {\n // HEVC-specific validation\n if (!HEVC_CODEC_STRING_REGEX.test(metadata.decoderConfig.codec)) {\n throw new TypeError('Video chunk metadata decoder configuration codec string for HEVC must be a valid HEVC codec string as'\n + ' specified in Section E.3 of ISO 14496-15.');\n }\n // `description` may or may not be set, depending on if the format is HEVC or Annex B, so don't perform any\n // validation for it.\n // https://www.w3.org/TR/webcodecs-hevc-codec-registration\n }\n else if (metadata.decoderConfig.codec.startsWith('vp8')) {\n // VP8-specific validation\n if (metadata.decoderConfig.codec !== 'vp8') {\n throw new TypeError('Video chunk metadata decoder configuration codec string for VP8 must be \"vp8\".');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('vp09')) {\n // VP9-specific validation\n if (!VP9_CODEC_STRING_REGEX.test(metadata.decoderConfig.codec)) {\n throw new TypeError('Video chunk metadata decoder configuration codec string for VP9 must be a valid VP9 codec string as'\n + ' specified in Section \"Codecs Parameter String\" of https://www.webmproject.org/vp9/mp4/.');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('av01')) {\n // AV1-specific validation\n if (!AV1_CODEC_STRING_REGEX.test(metadata.decoderConfig.codec)) {\n throw new TypeError('Video chunk metadata decoder configuration codec string for AV1 must be a valid AV1 codec string as'\n + ' specified in Section \"Codecs Parameter String\" of https://aomediacodec.github.io/av1-isobmff/.');\n }\n }\n};\nconst VALID_AUDIO_CODEC_STRING_PREFIXES = [\n 'mp4a', 'mp3', 'opus', 'vorbis', 'flac', 'ulaw', 'alaw', 'pcm', 'ac-3', 'ec-3',\n];\nexport const 
validateAudioChunkMetadata = (metadata) => {\n if (!metadata) {\n throw new TypeError('Audio chunk metadata must be provided.');\n }\n if (typeof metadata !== 'object') {\n throw new TypeError('Audio chunk metadata must be an object.');\n }\n if (!metadata.decoderConfig) {\n throw new TypeError('Audio chunk metadata must include a decoder configuration.');\n }\n if (typeof metadata.decoderConfig !== 'object') {\n throw new TypeError('Audio chunk metadata decoder configuration must be an object.');\n }\n if (typeof metadata.decoderConfig.codec !== 'string') {\n throw new TypeError('Audio chunk metadata decoder configuration must specify a codec string.');\n }\n if (!VALID_AUDIO_CODEC_STRING_PREFIXES.some(prefix => metadata.decoderConfig.codec.startsWith(prefix))) {\n throw new TypeError('Audio chunk metadata decoder configuration codec string must be a valid audio codec string as specified in'\n + ' the Mediabunny Codec Registry.');\n }\n if (!Number.isInteger(metadata.decoderConfig.sampleRate) || metadata.decoderConfig.sampleRate <= 0) {\n throw new TypeError('Audio chunk metadata decoder configuration must specify a valid sampleRate (positive integer).');\n }\n if (!Number.isInteger(metadata.decoderConfig.numberOfChannels) || metadata.decoderConfig.numberOfChannels <= 0) {\n throw new TypeError('Audio chunk metadata decoder configuration must specify a valid numberOfChannels (positive integer).');\n }\n if (metadata.decoderConfig.description !== undefined) {\n if (!isAllowSharedBufferSource(metadata.decoderConfig.description)) {\n throw new TypeError('Audio chunk metadata decoder configuration description, when defined, must be an ArrayBuffer or an'\n + ' ArrayBuffer view.');\n }\n }\n if (metadata.decoderConfig.codec.startsWith('mp4a')\n // These three refer to MP3:\n && metadata.decoderConfig.codec !== 'mp4a.69'\n && metadata.decoderConfig.codec !== 'mp4a.6B'\n && metadata.decoderConfig.codec !== 'mp4a.6b') {\n // AAC-specific validation\n const validStrings = 
['mp4a.40.2', 'mp4a.40.02', 'mp4a.40.5', 'mp4a.40.05', 'mp4a.40.29', 'mp4a.67'];\n if (!validStrings.includes(metadata.decoderConfig.codec)) {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for AAC must be a valid AAC codec string as'\n + ' specified in https://www.w3.org/TR/webcodecs-aac-codec-registration/.');\n }\n // `description` may or may not be set, depending on if the format is AAC or ADTS, so don't perform any\n // validation for it.\n // https://www.w3.org/TR/webcodecs-aac-codec-registration\n }\n else if (metadata.decoderConfig.codec.startsWith('mp3') || metadata.decoderConfig.codec.startsWith('mp4a')) {\n // MP3-specific validation\n if (metadata.decoderConfig.codec !== 'mp3'\n && metadata.decoderConfig.codec !== 'mp4a.69'\n && metadata.decoderConfig.codec !== 'mp4a.6B'\n && metadata.decoderConfig.codec !== 'mp4a.6b') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for MP3 must be \"mp3\", \"mp4a.69\" or'\n + ' \"mp4a.6B\".');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('opus')) {\n // Opus-specific validation\n if (metadata.decoderConfig.codec !== 'opus') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for Opus must be \"opus\".');\n }\n if (metadata.decoderConfig.description && metadata.decoderConfig.description.byteLength < 18) {\n // Description is optional for Opus per-spec, so we shouldn't enforce it\n throw new TypeError('Audio chunk metadata decoder configuration description, when specified, is expected to be an'\n + ' Identification Header as specified in Section 5.1 of RFC 7845.');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('vorbis')) {\n // Vorbis-specific validation\n if (metadata.decoderConfig.codec !== 'vorbis') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for Vorbis must be \"vorbis\".');\n }\n if (!metadata.decoderConfig.description) {\n throw new TypeError('Audio chunk 
metadata decoder configuration for Vorbis must include a description, which is expected to'\n + ' adhere to the format described in https://www.w3.org/TR/webcodecs-vorbis-codec-registration/.');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('flac')) {\n // FLAC-specific validation\n if (metadata.decoderConfig.codec !== 'flac') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for FLAC must be \"flac\".');\n }\n const minDescriptionSize = 4 + 4 + 34; // 'fLaC' + metadata block header + STREAMINFO block\n if (!metadata.decoderConfig.description || metadata.decoderConfig.description.byteLength < minDescriptionSize) {\n throw new TypeError('Audio chunk metadata decoder configuration for FLAC must include a description, which is expected to'\n + ' adhere to the format described in https://www.w3.org/TR/webcodecs-flac-codec-registration/.');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('ac-3') || metadata.decoderConfig.codec.startsWith('ac3')) {\n // AC3-specific validation\n if (metadata.decoderConfig.codec !== 'ac-3') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for AC-3 must be \"ac-3\".');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('ec-3') || metadata.decoderConfig.codec.startsWith('eac3')) {\n // EAC3-specific validation\n if (metadata.decoderConfig.codec !== 'ec-3') {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for EC-3 must be \"ec-3\".');\n }\n }\n else if (metadata.decoderConfig.codec.startsWith('pcm')\n || metadata.decoderConfig.codec.startsWith('ulaw')\n || metadata.decoderConfig.codec.startsWith('alaw')) {\n // PCM-specific validation\n if (!PCM_AUDIO_CODECS.includes(metadata.decoderConfig.codec)) {\n throw new TypeError('Audio chunk metadata decoder configuration codec string for PCM must be one of the supported PCM'\n + ` codecs (${PCM_AUDIO_CODECS.join(', ')}).`);\n }\n }\n};\nexport const validateSubtitleMetadata 
= (metadata) => {\n if (!metadata) {\n throw new TypeError('Subtitle metadata must be provided.');\n }\n if (typeof metadata !== 'object') {\n throw new TypeError('Subtitle metadata must be an object.');\n }\n if (!metadata.config) {\n throw new TypeError('Subtitle metadata must include a config object.');\n }\n if (typeof metadata.config !== 'object') {\n throw new TypeError('Subtitle metadata config must be an object.');\n }\n if (typeof metadata.config.description !== 'string') {\n throw new TypeError('Subtitle metadata config description must be a string.');\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\n/**\n * Base class for custom video decoders. To add your own custom video decoder, extend this class, implement the\n * abstract methods and static `supports` method, and register the decoder using {@link registerDecoder}.\n * @group Custom coders\n * @public\n */\nexport class CustomVideoDecoder {\n /** Returns true if and only if the decoder can decode the given codec configuration. */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n static supports(codec, config) {\n return false;\n }\n}\n/**\n * Base class for custom audio decoders. To add your own custom audio decoder, extend this class, implement the\n * abstract methods and static `supports` method, and register the decoder using {@link registerDecoder}.\n * @group Custom coders\n * @public\n */\nexport class CustomAudioDecoder {\n /** Returns true if and only if the decoder can decode the given codec configuration. */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n static supports(codec, config) {\n return false;\n }\n}\n/**\n * Base class for custom video encoders. 
To add your own custom video encoder, extend this class, implement the\n * abstract methods and static `supports` method, and register the encoder using {@link registerEncoder}.\n * @group Custom coders\n * @public\n */\nexport class CustomVideoEncoder {\n /** Returns true if and only if the encoder can encode the given codec configuration. */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n static supports(codec, config) {\n return false;\n }\n}\n/**\n * Base class for custom audio encoders. To add your own custom audio encoder, extend this class, implement the\n * abstract methods and static `supports` method, and register the encoder using {@link registerEncoder}.\n * @group Custom coders\n * @public\n */\nexport class CustomAudioEncoder {\n /** Returns true if and only if the encoder can encode the given codec configuration. */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n static supports(codec, config) {\n return false;\n }\n}\nexport const customVideoDecoders = [];\nexport const customAudioDecoders = [];\nexport const customVideoEncoders = [];\nexport const customAudioEncoders = [];\n/**\n * Registers a custom video or audio decoder. Registered decoders will automatically be used for decoding whenever\n * possible.\n * @group Custom coders\n * @public\n */\nexport const registerDecoder = (decoder) => {\n if (decoder.prototype instanceof CustomVideoDecoder) {\n const casted = decoder;\n if (customVideoDecoders.includes(casted)) {\n console.warn('Video decoder already registered.');\n return;\n }\n customVideoDecoders.push(casted);\n }\n else if (decoder.prototype instanceof CustomAudioDecoder) {\n const casted = decoder;\n if (customAudioDecoders.includes(casted)) {\n console.warn('Audio decoder already registered.');\n return;\n }\n customAudioDecoders.push(casted);\n }\n else {\n throw new TypeError('Decoder must be a CustomVideoDecoder or CustomAudioDecoder.');\n }\n};\n/**\n * Registers a custom video or audio encoder. 
Registered encoders will automatically be used for encoding whenever\n * possible.\n * @group Custom coders\n * @public\n */\nexport const registerEncoder = (encoder) => {\n if (encoder.prototype instanceof CustomVideoEncoder) {\n const casted = encoder;\n if (customVideoEncoders.includes(casted)) {\n console.warn('Video encoder already registered.');\n return;\n }\n customVideoEncoders.push(casted);\n }\n else if (encoder.prototype instanceof CustomAudioEncoder) {\n const casted = encoder;\n if (customAudioEncoders.includes(casted)) {\n console.warn('Audio encoder already registered.');\n return;\n }\n customAudioEncoders.push(casted);\n }\n else {\n throw new TypeError('Encoder must be a CustomVideoEncoder or CustomAudioEncoder.');\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport class Demuxer {\n constructor(input) {\n this.input = input;\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { Bitstream } from '../../shared/bitstream.js';\nimport { assert, assertNever } from '../misc.js';\nimport { readBytes, readU16Be, readU8 } from '../reader.js';\n// https://www.rfc-editor.org/rfc/rfc9639.html#name-block-size-bits\nexport const getBlockSizeOrUncommon = (bits) => {\n if (bits === 0b0000) {\n return null;\n }\n else if (bits === 0b0001) {\n return 192;\n }\n else if (bits >= 0b0010 && bits <= 0b0101) {\n return 144 * 2 ** bits;\n }\n else if (bits === 0b0110) {\n return 'uncommon-u8';\n }\n else if (bits === 0b0111) {\n return 'uncommon-u16';\n }\n else if (bits >= 0b1000 && bits <= 0b1111) {\n return 2 ** bits;\n }\n else {\n return null;\n }\n};\n// https://www.rfc-editor.org/rfc/rfc9639.html#name-sample-rate-bits\nexport const getSampleRateOrUncommon = (sampleRateBits, streamInfoSampleRate) => {\n switch (sampleRateBits) {\n case 0b0000: return streamInfoSampleRate;\n case 0b0001: return 88200;\n case 0b0010: return 176400;\n case 0b0011: return 192000;\n case 0b0100: return 8000;\n case 0b0101: return 16000;\n case 0b0110: return 22050;\n case 0b0111: return 24000;\n case 0b1000: return 32000;\n case 0b1001: return 44100;\n case 0b1010: return 48000;\n case 0b1011: return 96000;\n case 0b1100: return 'uncommon-u8';\n case 0b1101: return 'uncommon-u16';\n case 0b1110: return 'uncommon-u16-10';\n default: return null;\n }\n};\n// https://www.rfc-editor.org/rfc/rfc9639.html#name-coded-number\nexport const readCodedNumber = (fileSlice) => {\n let ones = 0;\n const bitstream1 = new Bitstream(readBytes(fileSlice, 1));\n while (bitstream1.readBits(1) === 1) {\n ones++;\n }\n if (ones === 0) {\n return bitstream1.readBits(7);\n }\n const bitArray = [];\n const extraBytes = ones - 1;\n const bitstream2 = new Bitstream(readBytes(fileSlice, extraBytes));\n const firstByteBits = 8 - ones - 1;\n for (let i = 0; i < firstByteBits; i++) 
{\n bitArray.unshift(bitstream1.readBits(1));\n }\n for (let i = 0; i < extraBytes; i++) {\n for (let j = 0; j < 8; j++) {\n const val = bitstream2.readBits(1);\n if (j < 2) {\n continue;\n }\n bitArray.unshift(val);\n }\n }\n const encoded = bitArray.reduce((acc, bit, index) => {\n return acc | (bit << index);\n }, 0);\n return encoded;\n};\nexport const readBlockSize = (slice, blockSizeBits) => {\n if (blockSizeBits === 'uncommon-u16') {\n return readU16Be(slice) + 1;\n }\n else if (blockSizeBits === 'uncommon-u8') {\n return readU8(slice) + 1;\n }\n else if (typeof blockSizeBits === 'number') {\n return blockSizeBits;\n }\n else {\n assertNever(blockSizeBits);\n assert(false);\n }\n};\nexport const readSampleRate = (slice, sampleRateOrUncommon) => {\n if (sampleRateOrUncommon === 'uncommon-u16') {\n return readU16Be(slice);\n }\n if (sampleRateOrUncommon === 'uncommon-u16-10') {\n return readU16Be(slice) * 10;\n }\n if (sampleRateOrUncommon === 'uncommon-u8') {\n return readU8(slice);\n }\n if (typeof sampleRateOrUncommon === 'number') {\n return sampleRateOrUncommon;\n }\n return null;\n};\n// https://www.rfc-editor.org/rfc/rfc9639.html#section-9.1.1\nexport const calculateCrc8 = (data) => {\n const polynomial = 0x07; // x^8 + x^2 + x^1 + x^0\n let crc = 0x00; // Initialize CRC to 0\n for (const byte of data) {\n crc ^= byte; // XOR byte into least significant byte of crc\n for (let i = 0; i < 8; i++) {\n // For each bit in the byte\n if ((crc & 0x80) !== 0) {\n // If the leftmost bit (MSB) is set\n crc = (crc << 1) ^ polynomial; // Shift left and XOR with polynomial\n }\n else {\n crc <<= 1; // Just shift left\n }\n crc &= 0xff; // Ensure CRC remains 8-bit\n }\n }\n return crc;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { decodeSynchsafe, encodeSynchsafe } from '../shared/mp3-misc.js';\nimport { coalesceIndex, textDecoder, textEncoder, isIso88591Compatible, assertNever, keyValueIterator, toDataView, } from './misc.js';\nimport { readAscii, readBytes, readU32Be, readU8 } from './reader.js';\nexport var Id3V2HeaderFlags;\n(function (Id3V2HeaderFlags) {\n Id3V2HeaderFlags[Id3V2HeaderFlags[\"Unsynchronisation\"] = 128] = \"Unsynchronisation\";\n Id3V2HeaderFlags[Id3V2HeaderFlags[\"ExtendedHeader\"] = 64] = \"ExtendedHeader\";\n Id3V2HeaderFlags[Id3V2HeaderFlags[\"ExperimentalIndicator\"] = 32] = \"ExperimentalIndicator\";\n Id3V2HeaderFlags[Id3V2HeaderFlags[\"Footer\"] = 16] = \"Footer\";\n})(Id3V2HeaderFlags || (Id3V2HeaderFlags = {}));\nexport var Id3V2TextEncoding;\n(function (Id3V2TextEncoding) {\n Id3V2TextEncoding[Id3V2TextEncoding[\"ISO_8859_1\"] = 0] = \"ISO_8859_1\";\n Id3V2TextEncoding[Id3V2TextEncoding[\"UTF_16_WITH_BOM\"] = 1] = \"UTF_16_WITH_BOM\";\n Id3V2TextEncoding[Id3V2TextEncoding[\"UTF_16_BE_NO_BOM\"] = 2] = \"UTF_16_BE_NO_BOM\";\n Id3V2TextEncoding[Id3V2TextEncoding[\"UTF_8\"] = 3] = \"UTF_8\";\n})(Id3V2TextEncoding || (Id3V2TextEncoding = {}));\nexport const ID3_V1_TAG_SIZE = 128;\nexport const ID3_V2_HEADER_SIZE = 10;\nexport const ID3_V1_GENRES = [\n 'Blues', 'Classic rock', 'Country', 'Dance', 'Disco', 'Funk', 'Grunge', 'Hip-hop', 'Jazz',\n 'Metal', 'New age', 'Oldies', 'Other', 'Pop', 'Rhythm and blues', 'Rap', 'Reggae', 'Rock',\n 'Techno', 'Industrial', 'Alternative', 'Ska', 'Death metal', 'Pranks', 'Soundtrack',\n 'Euro-techno', 'Ambient', 'Trip-hop', 'Vocal', 'Jazz & funk', 'Fusion', 'Trance', 'Classical',\n 'Instrumental', 'Acid', 'House', 'Game', 'Sound clip', 'Gospel', 'Noise', 'Alternative rock',\n 'Bass', 'Soul', 'Punk', 'Space', 'Meditative', 'Instrumental pop', 'Instrumental rock',\n 'Ethnic', 'Gothic', 'Darkwave', 
'Techno-industrial', 'Electronic', 'Pop-folk', 'Eurodance',\n 'Dream', 'Southern rock', 'Comedy', 'Cult', 'Gangsta', 'Top 40', 'Christian rap', 'Pop/funk',\n 'Jungle music', 'Native US', 'Cabaret', 'New wave', 'Psychedelic', 'Rave', 'Showtunes',\n 'Trailer', 'Lo-fi', 'Tribal', 'Acid punk', 'Acid jazz', 'Polka', 'Retro', 'Musical',\n 'Rock \\'n\\' roll', 'Hard rock', 'Folk', 'Folk rock', 'National folk', 'Swing', 'Fast fusion',\n 'Bebop', 'Latin', 'Revival', 'Celtic', 'Bluegrass', 'Avantgarde', 'Gothic rock',\n 'Progressive rock', 'Psychedelic rock', 'Symphonic rock', 'Slow rock', 'Big band', 'Chorus',\n 'Easy listening', 'Acoustic', 'Humour', 'Speech', 'Chanson', 'Opera', 'Chamber music',\n 'Sonata', 'Symphony', 'Booty bass', 'Primus', 'Porn groove', 'Satire', 'Slow jam', 'Club',\n 'Tango', 'Samba', 'Folklore', 'Ballad', 'Power ballad', 'Rhythmic Soul', 'Freestyle', 'Duet',\n 'Punk rock', 'Drum solo', 'A cappella', 'Euro-house', 'Dance hall', 'Goa music', 'Drum & bass',\n 'Club-house', 'Hardcore techno', 'Terror', 'Indie', 'Britpop', 'Negerpunk', 'Polsk punk',\n 'Beat', 'Christian gangsta rap', 'Heavy metal', 'Black metal', 'Crossover',\n 'Contemporary Christian', 'Christian rock', 'Merengue', 'Salsa', 'Thrash metal', 'Anime',\n 'Jpop', 'Synthpop', 'Christmas', 'Art rock', 'Baroque', 'Bhangra', 'Big beat', 'Breakbeat',\n 'Chillout', 'Downtempo', 'Dub', 'EBM', 'Eclectic', 'Electro', 'Electroclash', 'Emo',\n 'Experimental', 'Garage', 'Global', 'IDM', 'Illbient', 'Industro-Goth', 'Jam Band',\n 'Krautrock', 'Leftfield', 'Lounge', 'Math rock', 'New romantic', 'Nu-breakz', 'Post-punk',\n 'Post-rock', 'Psytrance', 'Shoegaze', 'Space rock', 'Trop rock', 'World music', 'Neoclassical',\n 'Audiobook', 'Audio theatre', 'Neue Deutsche Welle', 'Podcast', 'Indie rock', 'G-Funk',\n 'Dubstep', 'Garage rock', 'Psybient',\n];\nexport const parseId3V1Tag = (slice, tags) => {\n const startPos = slice.filePos;\n tags.raw ??= {};\n tags.raw['TAG'] ??= readBytes(slice, ID3_V1_TAG_SIZE - 
3); // Dump the whole tag into the raw metadata\n slice.filePos = startPos;\n const title = readId3V1String(slice, 30);\n if (title)\n tags.title ??= title;\n const artist = readId3V1String(slice, 30);\n if (artist)\n tags.artist ??= artist;\n const album = readId3V1String(slice, 30);\n if (album)\n tags.album ??= album;\n const yearText = readId3V1String(slice, 4);\n const year = Number.parseInt(yearText, 10);\n if (Number.isInteger(year) && year > 0) {\n tags.date ??= new Date(year, 0, 1);\n }\n const commentBytes = readBytes(slice, 30);\n let comment;\n // Check for the ID3v1.1 track number format:\n // The 29th byte (index 28) is a null terminator, and the 30th byte is the track number.\n if (commentBytes[28] === 0 && commentBytes[29] !== 0) {\n const trackNum = commentBytes[29];\n if (trackNum > 0) {\n tags.trackNumber ??= trackNum;\n }\n slice.skip(-30);\n comment = readId3V1String(slice, 28);\n slice.skip(2);\n }\n else {\n slice.skip(-30);\n comment = readId3V1String(slice, 30);\n }\n if (comment)\n tags.comment ??= comment;\n const genreIndex = readU8(slice);\n if (genreIndex < ID3_V1_GENRES.length) {\n tags.genre ??= ID3_V1_GENRES[genreIndex];\n }\n};\nexport const readId3V1String = (slice, length) => {\n const bytes = readBytes(slice, length);\n const endIndex = coalesceIndex(bytes.indexOf(0), bytes.length);\n const relevantBytes = bytes.subarray(0, endIndex);\n // Decode as ISO-8859-1\n let str = '';\n for (let i = 0; i < relevantBytes.length; i++) {\n str += String.fromCharCode(relevantBytes[i]);\n }\n return str.trimEnd(); // String also may be padded with spaces\n};\nexport const readId3V2Header = (slice) => {\n const startPos = slice.filePos;\n const tag = readAscii(slice, 3);\n const majorVersion = readU8(slice);\n const revision = readU8(slice);\n const flags = readU8(slice);\n const sizeRaw = readU32Be(slice);\n if (tag !== 'ID3' || majorVersion === 0xff || revision === 0xff || (sizeRaw & 0x80808080) !== 0) {\n slice.filePos = startPos;\n return 
null;\n }\n const size = decodeSynchsafe(sizeRaw);\n return { majorVersion, revision, flags, size };\n};\nexport const parseId3V2Tag = (slice, header, tags) => {\n // https://id3.org/id3v2.3.0\n if (![2, 3, 4].includes(header.majorVersion)) {\n console.warn(`Unsupported ID3v2 major version: ${header.majorVersion}`);\n return;\n }\n const bytes = readBytes(slice, header.size);\n const reader = new Id3V2Reader(header, bytes);\n if (header.flags & Id3V2HeaderFlags.Footer) {\n reader.removeFooter();\n }\n if ((header.flags & Id3V2HeaderFlags.Unsynchronisation) && header.majorVersion === 3) {\n reader.ununsynchronizeAll();\n }\n if (header.flags & Id3V2HeaderFlags.ExtendedHeader) {\n const extendedHeaderSize = reader.readU32();\n if (header.majorVersion === 3) {\n reader.pos += extendedHeaderSize; // The extended header size excludes itself\n }\n else {\n reader.pos += extendedHeaderSize - 4; // The extended header size includes itself\n }\n }\n while (reader.pos <= reader.bytes.length - reader.frameHeaderSize()) {\n const frame = reader.readId3V2Frame();\n if (!frame) {\n break;\n }\n const frameStartPos = reader.pos;\n const frameEndPos = reader.pos + frame.size;\n let frameEncrypted = false;\n let frameCompressed = false;\n let frameUnsynchronized = false;\n if (header.majorVersion === 3) {\n frameEncrypted = !!(frame.flags & (1 << 6));\n frameCompressed = !!(frame.flags & (1 << 7));\n }\n else if (header.majorVersion === 4) {\n frameEncrypted = !!(frame.flags & (1 << 2));\n frameCompressed = !!(frame.flags & (1 << 3));\n frameUnsynchronized = !!(frame.flags & (1 << 1))\n || !!(header.flags & Id3V2HeaderFlags.Unsynchronisation);\n }\n if (frameEncrypted) {\n console.warn(`Skipping encrypted ID3v2 frame ${frame.id}`);\n reader.pos = frameEndPos;\n continue;\n }\n if (frameCompressed) {\n console.warn(`Skipping compressed ID3v2 frame ${frame.id}`); // Maybe someday? 
Idk\n reader.pos = frameEndPos;\n continue;\n }\n if (frameUnsynchronized) {\n reader.ununsynchronizeRegion(reader.pos, frameEndPos);\n }\n tags.raw ??= {};\n if (frame.id[0] === 'T') {\n // It's a text frame, let's decode as text\n tags.raw[frame.id] ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n else {\n // For the others, let's just get the bytes\n tags.raw[frame.id] ??= reader.readBytes(frame.size);\n }\n reader.pos = frameStartPos;\n switch (frame.id) {\n case 'TIT2':\n case 'TT2':\n {\n tags.title ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n ;\n break;\n case 'TIT3':\n case 'TT3':\n {\n tags.description ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n ;\n break;\n case 'TPE1':\n case 'TP1':\n {\n tags.artist ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n ;\n break;\n case 'TALB':\n case 'TAL':\n {\n tags.album ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n ;\n break;\n case 'TPE2':\n case 'TP2':\n {\n tags.albumArtist ??= reader.readId3V2EncodingAndText(frameEndPos);\n }\n ;\n break;\n case 'TRCK':\n case 'TRK':\n {\n const trackText = reader.readId3V2EncodingAndText(frameEndPos);\n const parts = trackText.split('/');\n const trackNum = Number.parseInt(parts[0], 10);\n const tracksTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(trackNum) && trackNum > 0) {\n tags.trackNumber ??= trackNum;\n }\n if (tracksTotal && Number.isInteger(tracksTotal) && tracksTotal > 0) {\n tags.tracksTotal ??= tracksTotal;\n }\n }\n ;\n break;\n case 'TPOS':\n case 'TPA':\n {\n const discText = reader.readId3V2EncodingAndText(frameEndPos);\n const parts = discText.split('/');\n const discNum = Number.parseInt(parts[0], 10);\n const discsTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(discNum) && discNum > 0) {\n tags.discNumber ??= discNum;\n }\n if (discsTotal && Number.isInteger(discsTotal) && discsTotal > 0) {\n tags.discsTotal ??= discsTotal;\n }\n }\n ;\n break;\n case 'TCON':\n 
case 'TCO':\n {\n const genreText = reader.readId3V2EncodingAndText(frameEndPos);\n let match = /^\\((\\d+)\\)/.exec(genreText);\n if (match) {\n const genreNumber = Number.parseInt(match[1]);\n if (ID3_V1_GENRES[genreNumber] !== undefined) {\n tags.genre ??= ID3_V1_GENRES[genreNumber];\n break;\n }\n }\n match = /^\\d+$/.exec(genreText);\n if (match) {\n const genreNumber = Number.parseInt(match[0]);\n if (ID3_V1_GENRES[genreNumber] !== undefined) {\n tags.genre ??= ID3_V1_GENRES[genreNumber];\n break;\n }\n }\n tags.genre ??= genreText;\n }\n ;\n break;\n case 'TDRC':\n case 'TDAT':\n {\n const dateText = reader.readId3V2EncodingAndText(frameEndPos);\n const date = new Date(dateText);\n if (!Number.isNaN(date.getTime())) {\n tags.date ??= date;\n }\n }\n ;\n break;\n case 'TYER':\n case 'TYE':\n {\n const yearText = reader.readId3V2EncodingAndText(frameEndPos);\n const year = Number.parseInt(yearText, 10);\n if (Number.isInteger(year)) {\n tags.date ??= new Date(year, 0, 1);\n }\n }\n ;\n break;\n case 'USLT':\n case 'ULT':\n {\n const encoding = reader.readU8();\n reader.pos += 3; // Skip language\n reader.readId3V2Text(encoding, frameEndPos); // Short content description\n tags.lyrics ??= reader.readId3V2Text(encoding, frameEndPos);\n }\n ;\n break;\n case 'COMM':\n case 'COM':\n {\n const encoding = reader.readU8();\n reader.pos += 3; // Skip language\n reader.readId3V2Text(encoding, frameEndPos); // Short content description\n tags.comment ??= reader.readId3V2Text(encoding, frameEndPos);\n }\n ;\n break;\n case 'APIC':\n case 'PIC':\n {\n const encoding = reader.readId3V2TextEncoding();\n let mimeType;\n if (header.majorVersion === 2) {\n const imageFormat = reader.readAscii(3);\n mimeType = imageFormat === 'PNG'\n ? 'image/png'\n : imageFormat === 'JPG'\n ? 
'image/jpeg'\n : 'image/*';\n }\n else {\n mimeType = reader.readId3V2Text(encoding, frameEndPos);\n }\n const pictureType = reader.readU8();\n const description = reader.readId3V2Text(encoding, frameEndPos).trimEnd(); // Trim ending spaces\n const imageDataSize = frameEndPos - reader.pos;\n if (imageDataSize >= 0) {\n const imageData = reader.readBytes(imageDataSize);\n if (!tags.images)\n tags.images = [];\n tags.images.push({\n data: imageData,\n mimeType,\n kind: pictureType === 3\n ? 'coverFront'\n : pictureType === 4\n ? 'coverBack'\n : 'unknown',\n description,\n });\n }\n }\n ;\n break;\n default:\n {\n reader.pos += frame.size;\n }\n ;\n break;\n }\n reader.pos = frameEndPos;\n }\n};\n// https://id3.org/id3v2.3.0\nexport class Id3V2Reader {\n constructor(header, bytes) {\n this.header = header;\n this.bytes = bytes;\n this.pos = 0;\n this.view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);\n }\n frameHeaderSize() {\n return this.header.majorVersion === 2 ? 6 : 10;\n }\n ununsynchronizeAll() {\n const newBytes = [];\n for (let i = 0; i < this.bytes.length; i++) {\n const value1 = this.bytes[i];\n newBytes.push(value1);\n if (value1 === 0xff && i !== this.bytes.length - 1) {\n const value2 = this.bytes[i];\n if (value2 === 0x00) {\n i++;\n }\n }\n }\n this.bytes = new Uint8Array(newBytes);\n this.view = new DataView(this.bytes.buffer);\n }\n ununsynchronizeRegion(start, end) {\n const newBytes = [];\n for (let i = start; i < end; i++) {\n const value1 = this.bytes[i];\n newBytes.push(value1);\n if (value1 === 0xff && i !== end - 1) {\n const value2 = this.bytes[i + 1];\n if (value2 === 0x00) {\n i++;\n }\n }\n }\n const before = this.bytes.subarray(0, start);\n const after = this.bytes.subarray(end);\n this.bytes = new Uint8Array(before.length + newBytes.length + after.length);\n this.bytes.set(before, 0);\n this.bytes.set(newBytes, before.length);\n this.bytes.set(after, before.length + newBytes.length);\n this.view = new 
DataView(this.bytes.buffer);\n }\n removeFooter() {\n this.bytes = this.bytes.subarray(0, this.bytes.length - ID3_V2_HEADER_SIZE);\n this.view = new DataView(this.bytes.buffer);\n }\n readBytes(length) {\n const slice = this.bytes.subarray(this.pos, this.pos + length);\n this.pos += length;\n return slice;\n }\n readU8() {\n const value = this.view.getUint8(this.pos);\n this.pos += 1;\n return value;\n }\n readU16() {\n const value = this.view.getUint16(this.pos, false);\n this.pos += 2;\n return value;\n }\n readU24() {\n const high = this.view.getUint16(this.pos, false);\n const low = this.view.getUint8(this.pos + 1);\n this.pos += 3;\n return high * 0x100 + low;\n }\n readU32() {\n const value = this.view.getUint32(this.pos, false);\n this.pos += 4;\n return value;\n }\n readAscii(length) {\n let str = '';\n for (let i = 0; i < length; i++) {\n str += String.fromCharCode(this.view.getUint8(this.pos + i));\n }\n this.pos += length;\n return str;\n }\n readId3V2Frame() {\n if (this.header.majorVersion === 2) {\n const id = this.readAscii(3);\n if (id === '\\x00\\x00\\x00') {\n return null;\n }\n const size = this.readU24();\n return { id, size, flags: 0 };\n }\n else {\n const id = this.readAscii(4);\n if (id === '\\x00\\x00\\x00\\x00') {\n // We've landed in the padding section\n return null;\n }\n const sizeRaw = this.readU32();\n let size = this.header.majorVersion === 4\n ? decodeSynchsafe(sizeRaw)\n : sizeRaw;\n const flags = this.readU16();\n const headerEndPos = this.pos;\n // Some files may have incorrectly synchsafed/unsynchsafed sizes. 
To validate which interpretation is valid,\n // we validate a size by skipping ahead and seeing if we land at a valid frame header (or at the end of the\n // tag.\n const isSizeValid = (size) => {\n const nextPos = this.pos + size;\n if (nextPos > this.bytes.length) {\n return false;\n }\n if (nextPos <= this.bytes.length - this.frameHeaderSize()) {\n this.pos += size;\n const nextId = this.readAscii(4);\n if (nextId !== '\\x00\\x00\\x00\\x00' && !/[0-9A-Z]{4}/.test(nextId)) {\n return false;\n }\n }\n return true;\n };\n if (!isSizeValid(size)) {\n // Flip the synchsafing, and try if this one makes more sense\n const otherSize = this.header.majorVersion === 4\n ? sizeRaw\n : decodeSynchsafe(sizeRaw);\n if (isSizeValid(otherSize)) {\n size = otherSize;\n }\n }\n this.pos = headerEndPos;\n return { id, size, flags };\n }\n }\n readId3V2TextEncoding() {\n const number = this.readU8();\n if (number > 3) {\n throw new Error(`Unsupported text encoding: ${number}`);\n }\n return number;\n }\n readId3V2Text(encoding, until) {\n const startPos = this.pos;\n const data = this.readBytes(until - this.pos);\n switch (encoding) {\n case Id3V2TextEncoding.ISO_8859_1: {\n let str = '';\n for (let i = 0; i < data.length; i++) {\n const value = data[i];\n if (value === 0) {\n this.pos = startPos + i + 1;\n break;\n }\n str += String.fromCharCode(value);\n }\n return str;\n }\n case Id3V2TextEncoding.UTF_16_WITH_BOM: {\n if (data[0] === 0xff && data[1] === 0xfe) {\n const decoder = new TextDecoder('utf-16le');\n const endIndex = coalesceIndex(data.findIndex((x, i) => x === 0 && data[i + 1] === 0 && i % 2 === 0), data.length);\n this.pos = startPos + Math.min(endIndex + 2, data.length);\n return decoder.decode(data.subarray(2, endIndex));\n }\n else if (data[0] === 0xfe && data[1] === 0xff) {\n const decoder = new TextDecoder('utf-16be');\n const endIndex = coalesceIndex(data.findIndex((x, i) => x === 0 && data[i + 1] === 0 && i % 2 === 0), data.length);\n this.pos = startPos + 
Math.min(endIndex + 2, data.length);\n return decoder.decode(data.subarray(2, endIndex));\n }\n else {\n // Treat it like UTF-8, some files do this\n const endIndex = coalesceIndex(data.findIndex(x => x === 0), data.length);\n this.pos = startPos + Math.min(endIndex + 1, data.length);\n return textDecoder.decode(data.subarray(0, endIndex));\n }\n }\n case Id3V2TextEncoding.UTF_16_BE_NO_BOM: {\n const decoder = new TextDecoder('utf-16be');\n const endIndex = coalesceIndex(data.findIndex((x, i) => x === 0 && data[i + 1] === 0 && i % 2 === 0), data.length);\n this.pos = startPos + Math.min(endIndex + 2, data.length);\n return decoder.decode(data.subarray(0, endIndex));\n }\n case Id3V2TextEncoding.UTF_8: {\n const endIndex = coalesceIndex(data.findIndex(x => x === 0), data.length);\n this.pos = startPos + Math.min(endIndex + 1, data.length);\n return textDecoder.decode(data.subarray(0, endIndex));\n }\n }\n }\n readId3V2EncodingAndText(until) {\n if (this.pos >= until) {\n return '';\n }\n const encoding = this.readId3V2TextEncoding();\n return this.readId3V2Text(encoding, until);\n }\n}\nexport class Id3V2Writer {\n constructor(writer) {\n this.helper = new Uint8Array(8);\n this.helperView = toDataView(this.helper);\n this.writer = writer;\n }\n writeId3V2Tag(metadata) {\n const tagStartPos = this.writer.getPos();\n // Write ID3v2.4 header\n this.writeAscii('ID3');\n this.writeU8(0x04); // Version 2.4\n this.writeU8(0x00); // Revision 0\n this.writeU8(0x00); // Flags\n this.writeSynchsafeU32(0); // Size placeholder\n const framesStartPos = this.writer.getPos();\n const writtenTags = new Set();\n // Write all metadata frames\n for (const { key, value } of keyValueIterator(metadata)) {\n switch (key) {\n case 'title':\n {\n this.writeId3V2TextFrame('TIT2', value);\n writtenTags.add('TIT2');\n }\n ;\n break;\n case 'description':\n {\n this.writeId3V2TextFrame('TIT3', value);\n writtenTags.add('TIT3');\n }\n ;\n break;\n case 'artist':\n {\n 
this.writeId3V2TextFrame('TPE1', value);\n writtenTags.add('TPE1');\n }\n ;\n break;\n case 'album':\n {\n this.writeId3V2TextFrame('TALB', value);\n writtenTags.add('TALB');\n }\n ;\n break;\n case 'albumArtist':\n {\n this.writeId3V2TextFrame('TPE2', value);\n writtenTags.add('TPE2');\n }\n ;\n break;\n case 'trackNumber':\n {\n const string = metadata.tracksTotal !== undefined\n ? `${value}/${metadata.tracksTotal}`\n : value.toString();\n this.writeId3V2TextFrame('TRCK', string);\n writtenTags.add('TRCK');\n }\n ;\n break;\n case 'discNumber':\n {\n const string = metadata.discsTotal !== undefined\n ? `${value}/${metadata.discsTotal}`\n : value.toString();\n this.writeId3V2TextFrame('TPOS', string);\n writtenTags.add('TPOS');\n }\n ;\n break;\n case 'genre':\n {\n this.writeId3V2TextFrame('TCON', value);\n writtenTags.add('TCON');\n }\n ;\n break;\n case 'date':\n {\n this.writeId3V2TextFrame('TDRC', value.toISOString().slice(0, 10));\n writtenTags.add('TDRC');\n }\n ;\n break;\n case 'lyrics':\n {\n this.writeId3V2LyricsFrame(value);\n writtenTags.add('USLT');\n }\n ;\n break;\n case 'comment':\n {\n this.writeId3V2CommentFrame(value);\n writtenTags.add('COMM');\n }\n ;\n break;\n case 'images':\n {\n const pictureTypeMap = { coverFront: 0x03, coverBack: 0x04, unknown: 0x00 };\n for (const image of value) {\n const pictureType = pictureTypeMap[image.kind] ?? 0x00;\n const description = image.description ?? 
'';\n this.writeId3V2ApicFrame(image.mimeType, pictureType, description, image.data);\n }\n }\n ;\n break;\n case 'tracksTotal':\n case 'discsTotal':\n {\n // Handled with trackNumber and discNumber respectively\n }\n ;\n break;\n case 'raw':\n {\n // Handled later\n }\n ;\n break;\n default: {\n assertNever(key);\n }\n }\n }\n if (metadata.raw) {\n for (const key in metadata.raw) {\n const value = metadata.raw[key];\n if (value == null || key.length !== 4 || writtenTags.has(key)) {\n continue;\n }\n let bytes;\n if (typeof value === 'string') {\n const encoded = textEncoder.encode(value);\n bytes = new Uint8Array(encoded.byteLength + 2);\n bytes[0] = Id3V2TextEncoding.UTF_8;\n bytes.set(encoded, 1);\n // Last byte is the null terminator\n }\n else if (value instanceof Uint8Array) {\n bytes = value;\n }\n else {\n continue;\n }\n this.writeAscii(key);\n this.writeSynchsafeU32(bytes.byteLength);\n this.writeU16(0x0000);\n this.writer.write(bytes);\n }\n }\n const framesEndPos = this.writer.getPos();\n const framesSize = framesEndPos - framesStartPos;\n // Update the size field in the header (synchsafe)\n this.writer.seek(tagStartPos + 6); // Skip 'ID3' + version + revision + flags\n this.writeSynchsafeU32(framesSize);\n this.writer.seek(framesEndPos);\n return framesSize + 10; // +10 for the header size\n }\n writeU8(value) {\n this.helper[0] = value;\n this.writer.write(this.helper.subarray(0, 1));\n }\n writeU16(value) {\n this.helperView.setUint16(0, value, false);\n this.writer.write(this.helper.subarray(0, 2));\n }\n writeU32(value) {\n this.helperView.setUint32(0, value, false);\n this.writer.write(this.helper.subarray(0, 4));\n }\n writeAscii(text) {\n for (let i = 0; i < text.length; i++) {\n this.helper[i] = text.charCodeAt(i);\n }\n this.writer.write(this.helper.subarray(0, text.length));\n }\n writeSynchsafeU32(value) {\n this.writeU32(encodeSynchsafe(value));\n }\n writeIsoString(text) {\n const bytes = new Uint8Array(text.length + 1);\n for (let i = 0; 
i < text.length; i++) {\n bytes[i] = text.charCodeAt(i);\n }\n bytes[text.length] = 0x00;\n this.writer.write(bytes);\n }\n writeUtf8String(text) {\n const utf8Data = textEncoder.encode(text);\n this.writer.write(utf8Data);\n this.writeU8(0x00);\n }\n writeId3V2TextFrame(frameId, text) {\n const useIso88591 = isIso88591Compatible(text);\n const textDataLength = useIso88591 ? text.length : textEncoder.encode(text).byteLength;\n const frameSize = 1 + textDataLength + 1;\n this.writeAscii(frameId);\n this.writeSynchsafeU32(frameSize);\n this.writeU16(0x0000);\n this.writeU8(useIso88591 ? Id3V2TextEncoding.ISO_8859_1 : Id3V2TextEncoding.UTF_8);\n if (useIso88591) {\n this.writeIsoString(text);\n }\n else {\n this.writeUtf8String(text);\n }\n }\n writeId3V2LyricsFrame(lyrics) {\n const useIso88591 = isIso88591Compatible(lyrics);\n const shortDescription = '';\n const frameSize = 1 + 3 + shortDescription.length + 1 + lyrics.length + 1;\n this.writeAscii('USLT');\n this.writeSynchsafeU32(frameSize);\n this.writeU16(0x0000);\n this.writeU8(useIso88591 ? Id3V2TextEncoding.ISO_8859_1 : Id3V2TextEncoding.UTF_8);\n this.writeAscii('und');\n if (useIso88591) {\n this.writeIsoString(shortDescription);\n this.writeIsoString(lyrics);\n }\n else {\n this.writeUtf8String(shortDescription);\n this.writeUtf8String(lyrics);\n }\n }\n writeId3V2CommentFrame(comment) {\n const useIso88591 = isIso88591Compatible(comment);\n const textDataLength = useIso88591 ? comment.length : textEncoder.encode(comment).byteLength;\n const shortDescription = '';\n const frameSize = 1 + 3 + shortDescription.length + 1 + textDataLength + 1;\n this.writeAscii('COMM');\n this.writeSynchsafeU32(frameSize);\n this.writeU16(0x0000);\n this.writeU8(useIso88591 ? 
Id3V2TextEncoding.ISO_8859_1 : Id3V2TextEncoding.UTF_8);\n this.writeU8(0x75); // 'u'\n this.writeU8(0x6E); // 'n'\n this.writeU8(0x64); // 'd'\n if (useIso88591) {\n this.writeIsoString(shortDescription);\n this.writeIsoString(comment);\n }\n else {\n this.writeUtf8String(shortDescription);\n this.writeUtf8String(comment);\n }\n }\n writeId3V2ApicFrame(mimeType, pictureType, description, imageData) {\n const useIso88591 = isIso88591Compatible(mimeType) && isIso88591Compatible(description);\n const descriptionDataLength = useIso88591\n ? description.length\n : textEncoder.encode(description).byteLength;\n const frameSize = 1 + mimeType.length + 1 + 1 + descriptionDataLength + 1 + imageData.byteLength;\n this.writeAscii('APIC');\n this.writeSynchsafeU32(frameSize);\n this.writeU16(0x0000);\n this.writeU8(useIso88591 ? Id3V2TextEncoding.ISO_8859_1 : Id3V2TextEncoding.UTF_8);\n if (useIso88591) {\n this.writeIsoString(mimeType);\n }\n else {\n this.writeUtf8String(mimeType);\n }\n this.writeU8(pictureType);\n if (useIso88591) {\n this.writeIsoString(description);\n }\n else {\n this.writeUtf8String(description);\n }\n this.writer.write(imageData);\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { parseAacAudioSpecificConfig } from '../../shared/aac-misc.js';\nimport { extractAudioCodecString, extractVideoCodecString, OPUS_SAMPLE_RATE, parsePcmCodec, PCM_AUDIO_CODECS, } from '../codec.js';\nimport { extractAv1CodecInfoFromPacket, extractVp9CodecInfoFromPacket, FlacBlockType, parseEac3Config, getEac3SampleRate, getEac3ChannelCount, AC3_ACMOD_CHANNEL_COUNTS, } from '../codec-data.js';\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack, InputVideoTrack, } from '../input-track.js';\nimport { assert, binarySearchExact, binarySearchLessOrEqual, COLOR_PRIMARIES_MAP_INVERSE, findLastIndex, isIso639Dash2LanguageCode, last, MATRIX_COEFFICIENTS_MAP_INVERSE, normalizeRotation, roundToMultiple, textDecoder, TRANSFER_CHARACTERISTICS_MAP_INVERSE, UNDETERMINED_LANGUAGE, toDataView, roundIfAlmostInteger, } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { buildIsobmffMimeType } from './isobmff-misc.js';\nimport { MAX_BOX_HEADER_SIZE, MIN_BOX_HEADER_SIZE, readBoxHeader, readDataBox, readFixed_16_16, readFixed_2_30, readIsomVariableInteger, readMetadataStringShort, } from './isobmff-reader.js';\nimport { readBytes, readF64Be, readI16Be, readI32Be, readI64Be, readU16Be, readU24Be, readU32Be, readU64Be, readU8, readAscii, } from '../reader.js';\nimport { DEFAULT_TRACK_DISPOSITION, RichImageData } from '../metadata.js';\nimport { AC3_SAMPLE_RATES } from '../../shared/ac3-misc.js';\nimport { Bitstream } from '../../shared/bitstream.js';\nexport class IsobmffDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.moovSlice = null;\n this.currentTrack = null;\n this.tracks = [];\n this.metadataPromise = null;\n this.movieTimescale = -1;\n this.movieDurationInTimescale = -1;\n this.isQuickTime = false;\n this.metadataTags = {};\n this.currentMetadataKeys = null;\n this.isFragmented 
= false;
        this.fragmentTrackDefaults = [];
        this.currentFragment = null;
        /**
         * Caches the last fragment that was read. Based on the assumption that there will be multiple reads to the
         * same fragment in quick succession.
         */
        this.lastReadFragment = null;
        this.reader = input._reader;
    }
    /**
     * Computes the total duration of the file as the maximum of all track durations
     * (in seconds, per each track's own computeDuration). Returns 0 for a file with no tracks.
     */
    async computeDuration() {
        const tracks = await this.getTracks();
        const trackDurations = await Promise.all(tracks.map(x => x.computeDuration()));
        return Math.max(0, ...trackDurations);
    }
    /** Returns the input-facing track objects; triggers metadata parsing on first call. */
    async getTracks() {
        await this.readMetadata();
        return this.tracks.map(track => track.inputTrack);
    }
    /**
     * Builds the full MIME type string (container + codecs parameter) for this file.
     * Codec strings that could not be determined are filtered out rather than emitted empty.
     */
    async getMimeType() {
        await this.readMetadata();
        const codecStrings = await Promise.all(this.tracks.map(x => x.inputTrack.getCodecParameterString()));
        return buildIsobmffMimeType({
            isQuickTime: this.isQuickTime,
            hasVideo: this.tracks.some(x => x.info?.type === 'video'),
            hasAudio: this.tracks.some(x => x.info?.type === 'audio'),
            codecStrings: codecStrings.filter(Boolean),
        });
    }
    /** Returns the file-level metadata tags gathered during metadata parsing. */
    async getMetadataTags() {
        await this.readMetadata();
        return this.metadataTags;
    }
    /**
     * Parses the file's top-level metadata exactly once (memoized via ??= on this.metadataPromise;
     * concurrent callers share the same promise). Scans top-level boxes sequentially: 'ftyp'
     * determines QuickTime vs. ISO brand, and 'moov' is loaded fully and traversed, after which
     * scanning stops. For fragmented files with a known file size, additionally probes the end of
     * the file for an 'mfra' box to enable fast random access.
     */
    readMetadata() {
        return this.metadataPromise ??= (async () => {
            let currentPos = 0;
            while (true) {
                // Request just enough bytes for a box header; a null slice means EOF.
                let slice = this.reader.requestSliceRange(currentPos, MIN_BOX_HEADER_SIZE, MAX_BOX_HEADER_SIZE);
                if (slice instanceof Promise)
                    slice = await slice;
                if (!slice)
                    break;
                const startPos = currentPos;
                const boxInfo = readBoxHeader(slice);
                if (!boxInfo) {
                    break;
                }
                if (boxInfo.name === 'ftyp') {
                    // NOTE(review): string collapsed in extraction; QuickTime's major brand is
                    // 'qt' followed by two spaces — verify against the original source.
                    const majorBrand = readAscii(slice, 4);
                    this.isQuickTime = majorBrand === 'qt  ';
                }
                else if (boxInfo.name === 'moov') {
                    // Found moov, load it
                    let moovSlice = this.reader.requestSlice(slice.filePos, boxInfo.contentSize);
                    if (moovSlice instanceof Promise)
                        moovSlice = await moovSlice;
                    if (!moovSlice)
                        break;
                    this.moovSlice = moovSlice;
                    this.readContiguousBoxes(this.moovSlice);
                    // Put default tracks first
                    this.tracks.sort((a, b) => Number(b.disposition.default) - Number(a.disposition.default));
                    for (const track of this.tracks) {
                        // Modify the edit list offset based on the previous segment durations. They are in different
                        // timescales, so we first convert to seconds and then into the track timescale.
                        const previousSegmentDurationsInSeconds = track.editListPreviousSegmentDurations / this.movieTimescale;
                        track.editListOffset -= Math.round(previousSegmentDurationsInSeconds * track.timescale);
                    }
                    // moov found and processed; no need to scan further top-level boxes.
                    break;
                }
                currentPos = startPos + boxInfo.totalSize;
            }
            if (this.isFragmented && this.reader.fileSize !== null) {
                // The last 4 bytes may contain the size of the mfra box at the end of the file
                let lastWordSlice = this.reader.requestSlice(this.reader.fileSize - 4, 4);
                if (lastWordSlice instanceof Promise)
                    lastWordSlice = await lastWordSlice;
                assert(lastWordSlice);
                const lastWord = readU32Be(lastWordSlice);
                const potentialMfraPos = this.reader.fileSize - lastWord;
                // Only a plausible in-bounds position is probed; garbage trailer bytes fail this check.
                if (potentialMfraPos >= 0 && potentialMfraPos <= this.reader.fileSize - MAX_BOX_HEADER_SIZE) {
                    let mfraHeaderSlice = this.reader.requestSliceRange(potentialMfraPos, MIN_BOX_HEADER_SIZE, MAX_BOX_HEADER_SIZE);
                    if (mfraHeaderSlice instanceof Promise)
                        mfraHeaderSlice = await mfraHeaderSlice;
                    if (mfraHeaderSlice) {
                        const boxInfo = readBoxHeader(mfraHeaderSlice);
                        if (boxInfo && boxInfo.name === 'mfra') {
                            // We found the mfra box, allowing for much better random access. Let's parse it.
                            let mfraSlice = this.reader.requestSlice(mfraHeaderSlice.filePos, boxInfo.contentSize);
                            if (mfraSlice instanceof Promise)
                                mfraSlice = await mfraSlice;
                            if (mfraSlice) {
                                this.readContiguousBoxes(mfraSlice);
                            }
                        }
                    }
                }
            }
        })();
    }
    /**
     * Lazily builds (and caches on the track) the sample table for a track by re-traversing the
     * stbl portion of the already-loaded moov box. Must only be called after readMetadata() has
     * populated this.moovSlice. For PCM audio, per-frame samples are coalesced into per-chunk
     * samples; if composition time offsets exist, a sorted presentation-timestamp index is built.
     */
    getSampleTableForTrack(internalTrack) {
        if (internalTrack.sampleTable) {
            return internalTrack.sampleTable;
        }
        const sampleTable = {
            sampleTimingEntries: [],
            sampleCompositionTimeOffsets: [],
            sampleSizes: [],
            keySampleIndices: null,
            chunkOffsets: [],
            sampleToChunk: [],
            presentationTimestamps: null,
            presentationTimestampIndexMap: null,
        };
        internalTrack.sampleTable = sampleTable;
        assert(this.moovSlice);
        const stblContainerSlice = this.moovSlice.slice(internalTrack.sampleTableByteOffset);
        // traverseBox routes stbl child boxes (stts, stsz, stsc, ...) into currentTrack's sampleTable.
        this.currentTrack = internalTrack;
        this.traverseBox(stblContainerSlice);
        this.currentTrack = null;
        const isPcmCodec = internalTrack.info?.type === 'audio'
            && internalTrack.info.codec
            && PCM_AUDIO_CODECS.includes(internalTrack.info.codec);
        if (isPcmCodec && sampleTable.sampleCompositionTimeOffsets.length === 0) {
            // If the audio has PCM samples, the way the samples are defined in the sample table is somewhat
            // suboptimal: Each individual audio sample is its own sample, meaning we can have 48000 samples per second.
            // Because we treat each sample as its own atomic unit that can be decoded, this would lead to a huge
            // amount of very short samples for PCM audio. So instead, we make a transformation: If the audio is in PCM,
            // we say that each chunk (that normally holds many samples) now is one big sample. We can do this because
            // the samples in the chunk are contiguous and the format is PCM, so the entire chunk as one thing still
            // encodes valid audio information.
            assert(internalTrack.info?.type === 'audio');
            const pcmInfo = parsePcmCodec(internalTrack.info.codec);
            const newSampleTimingEntries = [];
            const newSampleSizes = [];
            for (let i = 0; i < sampleTable.sampleToChunk.length; i++) {
                const chunkEntry = sampleTable.sampleToChunk[i];
                const nextEntry = sampleTable.sampleToChunk[i + 1];
                // Number of chunks this stsc entry spans; the last entry extends to the final chunk offset.
                const chunkCount = (nextEntry ? nextEntry.startChunkIndex : sampleTable.chunkOffsets.length)
                    - chunkEntry.startChunkIndex;
                for (let j = 0; j < chunkCount; j++) {
                    const startSampleIndex = chunkEntry.startSampleIndex + j * chunkEntry.samplesPerChunk;
                    const endSampleIndex = startSampleIndex + chunkEntry.samplesPerChunk; // Exclusive, outside of chunk
                    const startTimingEntryIndex = binarySearchLessOrEqual(sampleTable.sampleTimingEntries, startSampleIndex, x => x.startIndex);
                    const startTimingEntry = sampleTable.sampleTimingEntries[startTimingEntryIndex];
                    const endTimingEntryIndex = binarySearchLessOrEqual(sampleTable.sampleTimingEntries, endSampleIndex, x => x.startIndex);
                    const endTimingEntry = sampleTable.sampleTimingEntries[endTimingEntryIndex];
                    // The chunk's duration is the timestamp span from its first sample to one past its last.
                    const firstSampleTimestamp = startTimingEntry.startDecodeTimestamp
                        + (startSampleIndex - startTimingEntry.startIndex) * startTimingEntry.delta;
                    const lastSampleTimestamp = endTimingEntry.startDecodeTimestamp
                        + (endSampleIndex - endTimingEntry.startIndex) * endTimingEntry.delta;
                    const delta = lastSampleTimestamp - firstSampleTimestamp;
                    const lastSampleTimingEntry = last(newSampleTimingEntries);
                    if (lastSampleTimingEntry && lastSampleTimingEntry.delta === delta) {
                        // Run-length extend the previous timing entry instead of pushing a new one.
                        lastSampleTimingEntry.count++;
                    }
                    else {
                        // One sample for the entire chunk
                        newSampleTimingEntries.push({
                            startIndex: chunkEntry.startChunkIndex + j,
                            startDecodeTimestamp: firstSampleTimestamp,
                            count: 1,
                            delta,
                        });
                    }
                    // Instead of determining the chunk's size by looping over the samples sizes in the sample table, we
                    // can directly compute it as we know how many PCM frames are in this chunk, and the size of each
                    // PCM frame. This also improves compatibility with some files which fail to write proper sample
                    // size values into their sample tables in the PCM case.
                    const chunkSize = chunkEntry.samplesPerChunk
                        * pcmInfo.sampleSize
                        * internalTrack.info.numberOfChannels;
                    newSampleSizes.push(chunkSize);
                }
                // Re-key the stsc entry to the coalesced world: one "sample" per chunk from now on.
                chunkEntry.startSampleIndex = chunkEntry.startChunkIndex;
                chunkEntry.samplesPerChunk = 1;
            }
            sampleTable.sampleTimingEntries = newSampleTimingEntries;
            sampleTable.sampleSizes = newSampleSizes;
        }
        if (sampleTable.sampleCompositionTimeOffsets.length > 0) {
            // If composition time offsets are defined, we must build a list of all presentation timestamps and then
            // sort them
            sampleTable.presentationTimestamps = [];
            for (const entry of sampleTable.sampleTimingEntries) {
                for (let i = 0; i < entry.count; i++) {
                    sampleTable.presentationTimestamps.push({
                        presentationTimestamp: entry.startDecodeTimestamp + i * entry.delta,
                        sampleIndex: entry.startIndex + i,
                    });
                }
            }
            for (const entry of sampleTable.sampleCompositionTimeOffsets) {
                for (let i = 0; i < entry.count; i++) {
                    const sampleIndex = entry.startIndex + i;
                    const sample = sampleTable.presentationTimestamps[sampleIndex];
                    if (!sample) {
                        // ctts may reference more samples than stts defined; ignore the excess.
                        continue;
                    }
                    sample.presentationTimestamp += entry.offset;
                }
            }
            sampleTable.presentationTimestamps.sort((a, b) => a.presentationTimestamp - b.presentationTimestamp);
            // Inverse mapping: sample index -> position in presentation order.
            sampleTable.presentationTimestampIndexMap = Array(sampleTable.presentationTimestamps.length).fill(-1);
            for (let i = 0; i < sampleTable.presentationTimestamps.length; i++) {
                sampleTable.presentationTimestampIndexMap[sampleTable.presentationTimestamps[i].sampleIndex] = i;
            }
        }
        else {
            // If they're not defined, we can simply use the decode timestamps as presentation timestamps
        }
        return sampleTable;
    }
    /**
     * Reads and parses the moof fragment starting at the given byte offset. Returns the cached
     * fragment immediately if it was the last one read. After traversal (which sets
     * this.lastReadFragment as a side effect), resolves each track's fragment start timestamp —
     * via the mfra lookup table if present, otherwise via the incrementally built position cache —
     * and records the fragment into that cache to speed up future lookups.
     */
    async readFragment(startPos) {
        if (this.lastReadFragment?.moofOffset === startPos) {
            return this.lastReadFragment;
        }
        let headerSlice = this.reader.requestSliceRange(startPos, MIN_BOX_HEADER_SIZE, MAX_BOX_HEADER_SIZE);
        if (headerSlice instanceof Promise)
            headerSlice = await headerSlice;
        assert(headerSlice);
        const moofBoxInfo = readBoxHeader(headerSlice);
        assert(moofBoxInfo?.name === 'moof');
        let entireSlice = this.reader.requestSlice(startPos, moofBoxInfo.totalSize);
        if (entireSlice instanceof Promise)
            entireSlice = await entireSlice;
        assert(entireSlice);
        // Traversing the moof box populates this.lastReadFragment.
        this.traverseBox(entireSlice);
        const fragment = this.lastReadFragment;
        assert(fragment && fragment.moofOffset === startPos);
        for (const [, trackData] of fragment.trackData) {
            const track = trackData.track;
            const { fragmentPositionCache } = track;
            if (!trackData.startTimestampIsFinal) {
                // It may be that some tracks don't define the base decode time, i.e. when the fragment begins. This
                // we'll need to figure out the start timestamp another way. We'll compute the timestamp by accessing
                // the lookup entries and fragment cache, which works out nicely with the lookup algorithm: If these
                // exist, then the lookup will automatically start at the furthest possible point. If they don't, the
                // lookup starts sequentially from the start, incrementally summing up all fragment durations. It's sort
                // of implicit, but it ends up working nicely.
                const lookupEntry = track.fragmentLookupTable.find(x => x.moofOffset === fragment.moofOffset);
                if (lookupEntry) {
                    // There's a lookup entry, let's use its timestamp
                    offsetFragmentTrackDataByTimestamp(trackData, lookupEntry.timestamp);
                }
                else {
                    const lastCacheIndex = binarySearchLessOrEqual(fragmentPositionCache, fragment.moofOffset - 1, x => x.moofOffset);
                    if (lastCacheIndex !== -1) {
                        // Let's use the timestamp of the previous fragment in the cache
                        const lastCache = fragmentPositionCache[lastCacheIndex];
                        offsetFragmentTrackDataByTimestamp(trackData, lastCache.endTimestamp);
                    }
                    else {
                        // We're the first fragment I guess, "offset by 0"
                    }
                }
                trackData.startTimestampIsFinal = true;
            }
            // Let's remember that a fragment with a given timestamp is here, speeding up future lookups if no
            // lookup table exists
            const insertionIndex = binarySearchLessOrEqual(fragmentPositionCache, trackData.startTimestamp, x => x.startTimestamp);
            if (insertionIndex === -1
                || fragmentPositionCache[insertionIndex].moofOffset !== fragment.moofOffset) {
                // Insert while keeping the cache sorted by startTimestamp; skip if already cached.
                fragmentPositionCache.splice(insertionIndex + 1, 0, {
                    moofOffset: fragment.moofOffset,
                    startTimestamp: trackData.startTimestamp,
                    endTimestamp: trackData.endTimestamp,
                });
            }
        }
        return fragment;
    }
    /**
     * Traverses all sibling boxes in the slice (via traverseBox side effects) until the slice is
     * exhausted or a box header fails to parse.
     */
    readContiguousBoxes(slice) {
        const startIndex = slice.filePos;
        while (slice.filePos - startIndex <= slice.length - MIN_BOX_HEADER_SIZE) {
            const foundBox = this.traverseBox(slice);
            if (!foundBox) {
                break;
            }
        }
    }
    /**
     * Generator variant of readContiguousBoxes: yields { boxInfo, slice } for each sibling box
     * without descending into it, then advances past the box. The yielded slice's filePos points
     * at the box's content; the caller may read from it before the next iteration.
     */
    // eslint-disable-next-line @stylistic/generator-star-spacing
    *iterateContiguousBoxes(slice) {
        const startIndex = slice.filePos;
        while (slice.filePos - startIndex <= slice.length - MIN_BOX_HEADER_SIZE) {
            const startPos = slice.filePos;
            const boxInfo = readBoxHeader(slice);
            if (!boxInfo) {
                break;
            }
            yield { boxInfo, slice };
            slice.filePos = startPos + boxInfo.totalSize;
        }
    }
traverseBox(slice) {\n const startPos = slice.filePos;\n const boxInfo = readBoxHeader(slice);\n if (!boxInfo) {\n return false;\n }\n const contentStartPos = slice.filePos;\n const boxEndPos = startPos + boxInfo.totalSize;\n switch (boxInfo.name) {\n case 'mdia':\n case 'minf':\n case 'dinf':\n case 'mfra':\n case 'edts':\n {\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n }\n ;\n break;\n case 'mvhd':\n {\n const version = readU8(slice);\n slice.skip(3); // Flags\n if (version === 1) {\n slice.skip(8 + 8);\n this.movieTimescale = readU32Be(slice);\n this.movieDurationInTimescale = readU64Be(slice);\n }\n else {\n slice.skip(4 + 4);\n this.movieTimescale = readU32Be(slice);\n this.movieDurationInTimescale = readU32Be(slice);\n }\n }\n ;\n break;\n case 'trak':\n {\n const track = {\n id: -1,\n demuxer: this,\n inputTrack: null,\n disposition: {\n ...DEFAULT_TRACK_DISPOSITION,\n },\n info: null,\n timescale: -1,\n durationInMovieTimescale: -1,\n durationInMediaTimescale: -1,\n rotation: 0,\n internalCodecId: null,\n name: null,\n languageCode: UNDETERMINED_LANGUAGE,\n sampleTableByteOffset: -1,\n sampleTable: null,\n fragmentLookupTable: [],\n currentFragmentState: null,\n fragmentPositionCache: [],\n editListPreviousSegmentDurations: 0,\n editListOffset: 0,\n };\n this.currentTrack = track;\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n if (track.id !== -1 && track.timescale !== -1 && track.info !== null) {\n if (track.info.type === 'video' && track.info.width !== -1) {\n const videoTrack = track;\n track.inputTrack = new InputVideoTrack(this.input, new IsobmffVideoTrackBacking(videoTrack));\n this.tracks.push(track);\n }\n else if (track.info.type === 'audio' && track.info.numberOfChannels !== -1) {\n const audioTrack = track;\n track.inputTrack = new InputAudioTrack(this.input, new IsobmffAudioTrackBacking(audioTrack));\n this.tracks.push(track);\n }\n }\n this.currentTrack = null;\n }\n ;\n 
break;\n case 'tkhd':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n const version = readU8(slice);\n const flags = readU24Be(slice);\n // Spec says disabled tracks are to be treated like they don't exist, but in practice, they are treated\n // more like non-default tracks.\n const trackEnabled = !!(flags & 0x1);\n track.disposition.default = trackEnabled;\n // Skip over creation & modification time to reach the track ID\n if (version === 0) {\n slice.skip(8);\n track.id = readU32Be(slice);\n slice.skip(4);\n track.durationInMovieTimescale = readU32Be(slice);\n }\n else if (version === 1) {\n slice.skip(16);\n track.id = readU32Be(slice);\n slice.skip(4);\n track.durationInMovieTimescale = readU64Be(slice);\n }\n else {\n throw new Error(`Incorrect track header version ${version}.`);\n }\n slice.skip(2 * 4 + 2 + 2 + 2 + 2);\n const matrix = [\n readFixed_16_16(slice),\n readFixed_16_16(slice),\n readFixed_2_30(slice),\n readFixed_16_16(slice),\n readFixed_16_16(slice),\n readFixed_2_30(slice),\n readFixed_16_16(slice),\n readFixed_16_16(slice),\n readFixed_2_30(slice),\n ];\n const rotation = normalizeRotation(roundToMultiple(extractRotationFromMatrix(matrix), 90));\n assert(rotation === 0 || rotation === 90 || rotation === 180 || rotation === 270);\n track.rotation = rotation;\n }\n ;\n break;\n case 'elst':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n const version = readU8(slice);\n slice.skip(3); // Flags\n let relevantEntryFound = false;\n let previousSegmentDurations = 0;\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const segmentDuration = version === 1\n ? readU64Be(slice)\n : readU32Be(slice);\n const mediaTime = version === 1\n ? 
readI64Be(slice)\n : readI32Be(slice);\n const mediaRate = readFixed_16_16(slice);\n if (segmentDuration === 0) {\n // Don't care\n continue;\n }\n if (relevantEntryFound) {\n console.warn('Unsupported edit list: multiple edits are not currently supported. Only using first edit.');\n break;\n }\n if (mediaTime === -1) {\n previousSegmentDurations += segmentDuration;\n continue;\n }\n if (mediaRate !== 1) {\n console.warn('Unsupported edit list entry: media rate must be 1.');\n break;\n }\n track.editListPreviousSegmentDurations = previousSegmentDurations;\n track.editListOffset = mediaTime;\n relevantEntryFound = true;\n }\n }\n ;\n break;\n case 'mdhd':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n const version = readU8(slice);\n slice.skip(3); // Flags\n if (version === 0) {\n slice.skip(8);\n track.timescale = readU32Be(slice);\n track.durationInMediaTimescale = readU32Be(slice);\n }\n else if (version === 1) {\n slice.skip(16);\n track.timescale = readU32Be(slice);\n track.durationInMediaTimescale = readU64Be(slice);\n }\n let language = readU16Be(slice);\n if (language > 0) {\n track.languageCode = '';\n for (let i = 0; i < 3; i++) {\n track.languageCode = String.fromCharCode(0x60 + (language & 0b11111)) + track.languageCode;\n language >>= 5;\n }\n if (!isIso639Dash2LanguageCode(track.languageCode)) {\n // Sometimes the bytes are garbage\n track.languageCode = UNDETERMINED_LANGUAGE;\n }\n }\n }\n ;\n break;\n case 'hdlr':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n slice.skip(8); // Version + flags + pre-defined\n const handlerType = readAscii(slice, 4);\n if (handlerType === 'vide') {\n track.info = {\n type: 'video',\n width: -1,\n height: -1,\n squarePixelWidth: -1,\n squarePixelHeight: -1,\n codec: null,\n codecDescription: null,\n colorSpace: null,\n avcType: null,\n avcCodecInfo: null,\n hevcCodecInfo: null,\n vp9CodecInfo: null,\n av1CodecInfo: null,\n };\n }\n else if (handlerType === 'soun') {\n 
track.info = {\n type: 'audio',\n numberOfChannels: -1,\n sampleRate: -1,\n codec: null,\n codecDescription: null,\n aacCodecInfo: null,\n };\n }\n }\n ;\n break;\n case 'stbl':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n track.sampleTableByteOffset = startPos;\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n }\n ;\n break;\n case 'stsd':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (track.info === null || track.sampleTable) {\n break;\n }\n const stsdVersion = readU8(slice);\n slice.skip(3); // Flags\n const entries = readU32Be(slice);\n for (let i = 0; i < entries; i++) {\n const sampleBoxStartPos = slice.filePos;\n const sampleBoxInfo = readBoxHeader(slice);\n if (!sampleBoxInfo) {\n break;\n }\n track.internalCodecId = sampleBoxInfo.name;\n const lowercaseBoxName = sampleBoxInfo.name.toLowerCase();\n if (track.info.type === 'video') {\n if (lowercaseBoxName === 'avc1' || lowercaseBoxName === 'avc3') {\n track.info.codec = 'avc';\n track.info.avcType = lowercaseBoxName === 'avc1' ? 
1 : 3;\n }\n else if (lowercaseBoxName === 'hvc1' || lowercaseBoxName === 'hev1') {\n track.info.codec = 'hevc';\n }\n else if (lowercaseBoxName === 'vp08') {\n track.info.codec = 'vp8';\n }\n else if (lowercaseBoxName === 'vp09') {\n track.info.codec = 'vp9';\n }\n else if (lowercaseBoxName === 'av01') {\n track.info.codec = 'av1';\n }\n else {\n console.warn(`Unsupported video codec (sample entry type '${sampleBoxInfo.name}').`);\n }\n slice.skip(6 * 1 + 2 + 2 + 2 + 3 * 4);\n track.info.width = readU16Be(slice);\n track.info.height = readU16Be(slice);\n track.info.squarePixelWidth = track.info.width;\n track.info.squarePixelHeight = track.info.height;\n slice.skip(4 + 4 + 4 + 2 + 32 + 2 + 2);\n this.readContiguousBoxes(slice.slice(slice.filePos, (sampleBoxStartPos + sampleBoxInfo.totalSize) - slice.filePos));\n }\n else {\n if (lowercaseBoxName === 'mp4a') {\n // We don't know the codec yet (might be AAC, might be MP3), need to read the esds box\n }\n else if (lowercaseBoxName === 'opus') {\n track.info.codec = 'opus';\n }\n else if (lowercaseBoxName === 'flac') {\n track.info.codec = 'flac';\n }\n else if (lowercaseBoxName === 'twos'\n || lowercaseBoxName === 'sowt'\n || lowercaseBoxName === 'raw '\n || lowercaseBoxName === 'in24'\n || lowercaseBoxName === 'in32'\n || lowercaseBoxName === 'fl32'\n || lowercaseBoxName === 'fl64'\n || lowercaseBoxName === 'lpcm'\n || lowercaseBoxName === 'ipcm' // ISO/IEC 23003-5\n || lowercaseBoxName === 'fpcm' // \"\n ) {\n // It's PCM\n // developer.apple.com/documentation/quicktime-file-format/sound_sample_descriptions/\n }\n else if (lowercaseBoxName === 'ulaw') {\n track.info.codec = 'ulaw';\n }\n else if (lowercaseBoxName === 'alaw') {\n track.info.codec = 'alaw';\n }\n else if (lowercaseBoxName === 'ac-3') {\n track.info.codec = 'ac3';\n }\n else if (lowercaseBoxName === 'ec-3') {\n track.info.codec = 'eac3';\n }\n else {\n console.warn(`Unsupported audio codec (sample entry type '${sampleBoxInfo.name}').`);\n }\n 
slice.skip(6 * 1 + 2);\n const version = readU16Be(slice);\n slice.skip(3 * 2);\n let channelCount = readU16Be(slice);\n let sampleSize = readU16Be(slice);\n slice.skip(2 * 2);\n // Can't use fixed16_16 as that's signed\n let sampleRate = readU32Be(slice) / 0x10000;\n if (stsdVersion === 0 && version > 0) {\n // Additional QuickTime fields\n if (version === 1) {\n slice.skip(4);\n sampleSize = 8 * readU32Be(slice);\n slice.skip(2 * 4);\n }\n else if (version === 2) {\n slice.skip(4);\n sampleRate = readF64Be(slice);\n channelCount = readU32Be(slice);\n slice.skip(4); // Always 0x7f000000\n sampleSize = readU32Be(slice);\n const flags = readU32Be(slice);\n slice.skip(2 * 4);\n if (lowercaseBoxName === 'lpcm') {\n const bytesPerSample = (sampleSize + 7) >> 3;\n const isFloat = Boolean(flags & 1);\n const isBigEndian = Boolean(flags & 2);\n const sFlags = flags & 4 ? -1 : 0; // I guess it means \"signed flags\" or something?\n if (sampleSize > 0 && sampleSize <= 64) {\n if (isFloat) {\n if (sampleSize === 32) {\n track.info.codec = isBigEndian ? 'pcm-f32be' : 'pcm-f32';\n }\n }\n else {\n if (sFlags & (1 << (bytesPerSample - 1))) {\n if (bytesPerSample === 1) {\n track.info.codec = 'pcm-s8';\n }\n else if (bytesPerSample === 2) {\n track.info.codec = isBigEndian ? 'pcm-s16be' : 'pcm-s16';\n }\n else if (bytesPerSample === 3) {\n track.info.codec = isBigEndian ? 'pcm-s24be' : 'pcm-s24';\n }\n else if (bytesPerSample === 4) {\n track.info.codec = isBigEndian ? 
'pcm-s32be' : 'pcm-s32';\n }\n }\n else {\n if (bytesPerSample === 1) {\n track.info.codec = 'pcm-u8';\n }\n }\n }\n }\n if (track.info.codec === null) {\n console.warn('Unsupported PCM format.');\n }\n }\n }\n }\n if (track.info.codec === 'opus') {\n sampleRate = OPUS_SAMPLE_RATE; // Always the same\n }\n track.info.numberOfChannels = channelCount;\n track.info.sampleRate = sampleRate;\n // PCM codec assignments\n if (lowercaseBoxName === 'twos') {\n if (sampleSize === 8) {\n track.info.codec = 'pcm-s8';\n }\n else if (sampleSize === 16) {\n track.info.codec = 'pcm-s16be';\n }\n else {\n console.warn(`Unsupported sample size ${sampleSize} for codec 'twos'.`);\n track.info.codec = null;\n }\n }\n else if (lowercaseBoxName === 'sowt') {\n if (sampleSize === 8) {\n track.info.codec = 'pcm-s8';\n }\n else if (sampleSize === 16) {\n track.info.codec = 'pcm-s16';\n }\n else {\n console.warn(`Unsupported sample size ${sampleSize} for codec 'sowt'.`);\n track.info.codec = null;\n }\n }\n else if (lowercaseBoxName === 'raw ') {\n track.info.codec = 'pcm-u8';\n }\n else if (lowercaseBoxName === 'in24') {\n track.info.codec = 'pcm-s24be';\n }\n else if (lowercaseBoxName === 'in32') {\n track.info.codec = 'pcm-s32be';\n }\n else if (lowercaseBoxName === 'fl32') {\n track.info.codec = 'pcm-f32be';\n }\n else if (lowercaseBoxName === 'fl64') {\n track.info.codec = 'pcm-f64be';\n }\n else if (lowercaseBoxName === 'ipcm') {\n track.info.codec = 'pcm-s16be'; // Placeholder, will be adjusted by the pcmC box\n }\n else if (lowercaseBoxName === 'fpcm') {\n track.info.codec = 'pcm-f32be'; // Placeholder, will be adjusted by the pcmC box\n }\n this.readContiguousBoxes(slice.slice(slice.filePos, (sampleBoxStartPos + sampleBoxInfo.totalSize) - slice.filePos));\n }\n }\n }\n ;\n break;\n case 'avcC':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info);\n track.info.codecDescription = readBytes(slice, boxInfo.contentSize);\n }\n ;\n break;\n case 
'hvcC':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info);\n track.info.codecDescription = readBytes(slice, boxInfo.contentSize);\n }\n ;\n break;\n case 'vpcC':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'video');\n slice.skip(4); // Version + flags\n const profile = readU8(slice);\n const level = readU8(slice);\n const thirdByte = readU8(slice);\n const bitDepth = thirdByte >> 4;\n const chromaSubsampling = (thirdByte >> 1) & 0b111;\n const videoFullRangeFlag = thirdByte & 1;\n const colourPrimaries = readU8(slice);\n const transferCharacteristics = readU8(slice);\n const matrixCoefficients = readU8(slice);\n track.info.vp9CodecInfo = {\n profile,\n level,\n bitDepth,\n chromaSubsampling,\n videoFullRangeFlag,\n colourPrimaries,\n transferCharacteristics,\n matrixCoefficients,\n };\n }\n ;\n break;\n case 'av1C':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'video');\n slice.skip(1); // Marker + version\n const secondByte = readU8(slice);\n const profile = secondByte >> 5;\n const level = secondByte & 0b11111;\n const thirdByte = readU8(slice);\n const tier = thirdByte >> 7;\n const highBitDepth = (thirdByte >> 6) & 1;\n const twelveBit = (thirdByte >> 5) & 1;\n const monochrome = (thirdByte >> 4) & 1;\n const chromaSubsamplingX = (thirdByte >> 3) & 1;\n const chromaSubsamplingY = (thirdByte >> 2) & 1;\n const chromaSamplePosition = thirdByte & 0b11;\n // Logic from https://aomediacodec.github.io/av1-spec/av1-spec.pdf\n const bitDepth = profile === 2 && highBitDepth ? (twelveBit ? 12 : 10) : (highBitDepth ? 
10 : 8);\n track.info.av1CodecInfo = {\n profile,\n level,\n tier,\n bitDepth,\n monochrome,\n chromaSubsamplingX,\n chromaSubsamplingY,\n chromaSamplePosition,\n };\n }\n ;\n break;\n case 'colr':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'video');\n const colourType = readAscii(slice, 4);\n if (colourType !== 'nclx') {\n break;\n }\n const colourPrimaries = readU16Be(slice);\n const transferCharacteristics = readU16Be(slice);\n const matrixCoefficients = readU16Be(slice);\n const fullRangeFlag = Boolean(readU8(slice) & 0x80);\n track.info.colorSpace = {\n primaries: COLOR_PRIMARIES_MAP_INVERSE[colourPrimaries],\n transfer: TRANSFER_CHARACTERISTICS_MAP_INVERSE[transferCharacteristics],\n matrix: MATRIX_COEFFICIENTS_MAP_INVERSE[matrixCoefficients],\n fullRange: fullRangeFlag,\n };\n }\n ;\n break;\n case 'pasp':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'video');\n const num = readU32Be(slice);\n const den = readU32Be(slice);\n if (num > den) {\n track.info.squarePixelWidth = Math.round(track.info.width * num / den);\n }\n else {\n track.info.squarePixelHeight = Math.round(track.info.height * den / num);\n }\n }\n ;\n break;\n case 'wave':\n {\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n }\n ;\n break;\n case 'esds':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n slice.skip(4); // Version + flags\n const tag = readU8(slice);\n assert(tag === 0x03); // ES Descriptor\n readIsomVariableInteger(slice); // Length\n slice.skip(2); // ES ID\n const mixed = readU8(slice);\n const streamDependenceFlag = (mixed & 0x80) !== 0;\n const urlFlag = (mixed & 0x40) !== 0;\n const ocrStreamFlag = (mixed & 0x20) !== 0;\n if (streamDependenceFlag) {\n slice.skip(2);\n }\n if (urlFlag) {\n const urlLength = readU8(slice);\n slice.skip(urlLength);\n }\n if (ocrStreamFlag) {\n 
slice.skip(2);\n }\n const decoderConfigTag = readU8(slice);\n assert(decoderConfigTag === 0x04); // DecoderConfigDescriptor\n const decoderConfigDescriptorLength = readIsomVariableInteger(slice); // Length\n const payloadStart = slice.filePos;\n const objectTypeIndication = readU8(slice);\n if (objectTypeIndication === 0x40 || objectTypeIndication === 0x67) {\n track.info.codec = 'aac';\n track.info.aacCodecInfo = {\n isMpeg2: objectTypeIndication === 0x67,\n objectType: null,\n };\n }\n else if (objectTypeIndication === 0x69 || objectTypeIndication === 0x6b) {\n track.info.codec = 'mp3';\n }\n else if (objectTypeIndication === 0xdd) {\n track.info.codec = 'vorbis'; // \"nonstandard, gpac uses it\" - FFmpeg\n }\n else {\n console.warn(`Unsupported audio codec (objectTypeIndication ${objectTypeIndication}) - discarding track.`);\n }\n slice.skip(1 + 3 + 4 + 4);\n if (decoderConfigDescriptorLength > slice.filePos - payloadStart) {\n // There's a DecoderSpecificInfo at the end, let's read it\n const decoderSpecificInfoTag = readU8(slice);\n assert(decoderSpecificInfoTag === 0x05); // DecoderSpecificInfo\n const decoderSpecificInfoLength = readIsomVariableInteger(slice);\n track.info.codecDescription = readBytes(slice, decoderSpecificInfoLength);\n if (track.info.codec === 'aac') {\n // Let's try to deduce more accurate values directly from the AudioSpecificConfig:\n const audioSpecificConfig = parseAacAudioSpecificConfig(track.info.codecDescription);\n if (audioSpecificConfig.numberOfChannels !== null) {\n track.info.numberOfChannels = audioSpecificConfig.numberOfChannels;\n }\n if (audioSpecificConfig.sampleRate !== null) {\n track.info.sampleRate = audioSpecificConfig.sampleRate;\n }\n }\n }\n }\n ;\n break;\n case 'enda':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n const littleEndian = readU16Be(slice) & 0xff; // 0xff is from FFmpeg\n if (littleEndian) {\n if (track.info.codec === 'pcm-s16be') {\n 
track.info.codec = 'pcm-s16';\n }\n else if (track.info.codec === 'pcm-s24be') {\n track.info.codec = 'pcm-s24';\n }\n else if (track.info.codec === 'pcm-s32be') {\n track.info.codec = 'pcm-s32';\n }\n else if (track.info.codec === 'pcm-f32be') {\n track.info.codec = 'pcm-f32';\n }\n else if (track.info.codec === 'pcm-f64be') {\n track.info.codec = 'pcm-f64';\n }\n }\n }\n ;\n break;\n case 'pcmC':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n slice.skip(1 + 3); // Version + flags\n // ISO/IEC 23003-5\n const formatFlags = readU8(slice);\n const isLittleEndian = Boolean(formatFlags & 0x01);\n const pcmSampleSize = readU8(slice);\n if (track.info.codec === 'pcm-s16be') {\n // ipcm\n if (isLittleEndian) {\n if (pcmSampleSize === 16) {\n track.info.codec = 'pcm-s16';\n }\n else if (pcmSampleSize === 24) {\n track.info.codec = 'pcm-s24';\n }\n else if (pcmSampleSize === 32) {\n track.info.codec = 'pcm-s32';\n }\n else {\n console.warn(`Invalid ipcm sample size ${pcmSampleSize}.`);\n track.info.codec = null;\n }\n }\n else {\n if (pcmSampleSize === 16) {\n track.info.codec = 'pcm-s16be';\n }\n else if (pcmSampleSize === 24) {\n track.info.codec = 'pcm-s24be';\n }\n else if (pcmSampleSize === 32) {\n track.info.codec = 'pcm-s32be';\n }\n else {\n console.warn(`Invalid ipcm sample size ${pcmSampleSize}.`);\n track.info.codec = null;\n }\n }\n }\n else if (track.info.codec === 'pcm-f32be') {\n // fpcm\n if (isLittleEndian) {\n if (pcmSampleSize === 32) {\n track.info.codec = 'pcm-f32';\n }\n else if (pcmSampleSize === 64) {\n track.info.codec = 'pcm-f64';\n }\n else {\n console.warn(`Invalid fpcm sample size ${pcmSampleSize}.`);\n track.info.codec = null;\n }\n }\n else {\n if (pcmSampleSize === 32) {\n track.info.codec = 'pcm-f32be';\n }\n else if (pcmSampleSize === 64) {\n track.info.codec = 'pcm-f64be';\n }\n else {\n console.warn(`Invalid fpcm sample size ${pcmSampleSize}.`);\n track.info.codec = null;\n 
}\n }\n }\n break;\n }\n ;\n case 'dOps':\n { // Used for Opus audio\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n slice.skip(1); // Version\n // https://www.opus-codec.org/docs/opus_in_isobmff.html\n const outputChannelCount = readU8(slice);\n const preSkip = readU16Be(slice);\n const inputSampleRate = readU32Be(slice);\n const outputGain = readI16Be(slice);\n const channelMappingFamily = readU8(slice);\n let channelMappingTable;\n if (channelMappingFamily !== 0) {\n channelMappingTable = readBytes(slice, 2 + outputChannelCount);\n }\n else {\n channelMappingTable = new Uint8Array(0);\n }\n // https://datatracker.ietf.org/doc/html/draft-ietf-codec-oggopus-06\n const description = new Uint8Array(8 + 1 + 1 + 2 + 4 + 2 + 1 + channelMappingTable.byteLength);\n const view = new DataView(description.buffer);\n view.setUint32(0, 0x4f707573, false); // 'Opus'\n view.setUint32(4, 0x48656164, false); // 'Head'\n view.setUint8(8, 1); // Version\n view.setUint8(9, outputChannelCount);\n view.setUint16(10, preSkip, true);\n view.setUint32(12, inputSampleRate, true);\n view.setInt16(16, outputGain, true);\n view.setUint8(18, channelMappingFamily);\n description.set(channelMappingTable, 19);\n track.info.codecDescription = description;\n track.info.numberOfChannels = outputChannelCount;\n // Don't copy the input sample rate, irrelevant, and output sample rate is fixed\n }\n ;\n break;\n case 'dfLa':\n { // Used for FLAC audio\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n slice.skip(4); // Version + flags\n // https://datatracker.ietf.org/doc/rfc9639/\n const BLOCK_TYPE_MASK = 0x7f;\n const LAST_METADATA_BLOCK_FLAG_MASK = 0x80;\n const startPos = slice.filePos;\n while (slice.filePos < boxEndPos) {\n const flagAndType = readU8(slice);\n const metadataBlockLength = readU24Be(slice);\n const type = flagAndType & BLOCK_TYPE_MASK;\n // It's a STREAMINFO block; let's 
extract the actual sample rate and channel count\n if (type === FlacBlockType.STREAMINFO) {\n slice.skip(10);\n // Extract sample rate and channel count\n const word = readU32Be(slice);\n const sampleRate = word >>> 12;\n const numberOfChannels = ((word >> 9) & 0b111) + 1;\n track.info.sampleRate = sampleRate;\n track.info.numberOfChannels = numberOfChannels;\n slice.skip(20);\n }\n else {\n // Simply skip ahead to the next block\n slice.skip(metadataBlockLength);\n }\n if (flagAndType & LAST_METADATA_BLOCK_FLAG_MASK) {\n break;\n }\n }\n const endPos = slice.filePos;\n slice.filePos = startPos;\n const bytes = readBytes(slice, endPos - startPos);\n const description = new Uint8Array(4 + bytes.byteLength);\n const view = new DataView(description.buffer);\n view.setUint32(0, 0x664c6143, false); // 'fLaC'\n description.set(bytes, 4);\n // Set the codec description to be 'fLaC' + all metadata blocks\n track.info.codecDescription = description;\n }\n ;\n break;\n case 'dac3':\n { // AC3SpecificBox\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n const bytes = readBytes(slice, 3);\n const bitstream = new Bitstream(bytes);\n const fscod = bitstream.readBits(2);\n bitstream.skipBits(5 + 3); // Skip bsid and bsmod\n const acmod = bitstream.readBits(3);\n const lfeon = bitstream.readBits(1);\n if (fscod < 3) {\n track.info.sampleRate = AC3_SAMPLE_RATES[fscod];\n }\n track.info.numberOfChannels = AC3_ACMOD_CHANNEL_COUNTS[acmod] + lfeon;\n }\n ;\n break;\n case 'dec3':\n { // EC3SpecificBox\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.info?.type === 'audio');\n const bytes = readBytes(slice, boxInfo.contentSize);\n const config = parseEac3Config(bytes);\n if (!config) {\n console.warn('Invalid dec3 box contents, ignoring.');\n break;\n }\n const sampleRate = getEac3SampleRate(config);\n if (sampleRate !== null) {\n track.info.sampleRate = sampleRate;\n }\n track.info.numberOfChannels = 
getEac3ChannelCount(config);\n }\n ;\n break;\n case 'stts':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n const entryCount = readU32Be(slice);\n let currentIndex = 0;\n let currentTimestamp = 0;\n for (let i = 0; i < entryCount; i++) {\n const sampleCount = readU32Be(slice);\n const sampleDelta = readU32Be(slice);\n track.sampleTable.sampleTimingEntries.push({\n startIndex: currentIndex,\n startDecodeTimestamp: currentTimestamp,\n count: sampleCount,\n delta: sampleDelta,\n });\n currentIndex += sampleCount;\n currentTimestamp += sampleCount * sampleDelta;\n }\n }\n ;\n break;\n case 'ctts':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(1 + 3); // Version + flags\n const entryCount = readU32Be(slice);\n let sampleIndex = 0;\n for (let i = 0; i < entryCount; i++) {\n const sampleCount = readU32Be(slice);\n const sampleOffset = readI32Be(slice);\n track.sampleTable.sampleCompositionTimeOffsets.push({\n startIndex: sampleIndex,\n count: sampleCount,\n offset: sampleOffset,\n });\n sampleIndex += sampleCount;\n }\n }\n ;\n break;\n case 'stsz':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n const sampleSize = readU32Be(slice);\n const sampleCount = readU32Be(slice);\n if (sampleSize === 0) {\n for (let i = 0; i < sampleCount; i++) {\n const sampleSize = readU32Be(slice);\n track.sampleTable.sampleSizes.push(sampleSize);\n }\n }\n else {\n track.sampleTable.sampleSizes.push(sampleSize);\n }\n }\n ;\n break;\n case 'stz2':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n slice.skip(3); // Reserved\n const fieldSize = readU8(slice); // in bits\n const sampleCount = readU32Be(slice);\n const bytes = 
readBytes(slice, Math.ceil(sampleCount * fieldSize / 8));\n const bitstream = new Bitstream(bytes);\n for (let i = 0; i < sampleCount; i++) {\n const sampleSize = bitstream.readBits(fieldSize);\n track.sampleTable.sampleSizes.push(sampleSize);\n }\n }\n ;\n break;\n case 'stss':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n track.sampleTable.keySampleIndices = [];\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const sampleIndex = readU32Be(slice) - 1; // Convert to 0-indexed\n track.sampleTable.keySampleIndices.push(sampleIndex);\n }\n if (track.sampleTable.keySampleIndices[0] !== 0) {\n // Some files don't mark the first sample a key sample, which is basically almost always incorrect.\n // Here, we correct for that mistake:\n track.sampleTable.keySampleIndices.unshift(0);\n }\n }\n ;\n break;\n case 'stsc':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4);\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const startChunkIndex = readU32Be(slice) - 1; // Convert to 0-indexed\n const samplesPerChunk = readU32Be(slice);\n const sampleDescriptionIndex = readU32Be(slice);\n track.sampleTable.sampleToChunk.push({\n startSampleIndex: -1,\n startChunkIndex,\n samplesPerChunk,\n sampleDescriptionIndex,\n });\n }\n let startSampleIndex = 0;\n for (let i = 0; i < track.sampleTable.sampleToChunk.length; i++) {\n track.sampleTable.sampleToChunk[i].startSampleIndex = startSampleIndex;\n if (i < track.sampleTable.sampleToChunk.length - 1) {\n const nextChunk = track.sampleTable.sampleToChunk[i + 1];\n const chunkCount = nextChunk.startChunkIndex\n - track.sampleTable.sampleToChunk[i].startChunkIndex;\n startSampleIndex += chunkCount * track.sampleTable.sampleToChunk[i].samplesPerChunk;\n }\n }\n }\n ;\n break;\n case 'stco':\n {\n const 
track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const chunkOffset = readU32Be(slice);\n track.sampleTable.chunkOffsets.push(chunkOffset);\n }\n }\n ;\n break;\n case 'co64':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n if (!track.sampleTable) {\n break;\n }\n slice.skip(4); // Version + flags\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const chunkOffset = readU64Be(slice);\n track.sampleTable.chunkOffsets.push(chunkOffset);\n }\n }\n ;\n break;\n case 'mvex':\n {\n this.isFragmented = true;\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n }\n ;\n break;\n case 'mehd':\n {\n const version = readU8(slice);\n slice.skip(3); // Flags\n const fragmentDuration = version === 1 ? readU64Be(slice) : readU32Be(slice);\n this.movieDurationInTimescale = fragmentDuration;\n }\n ;\n break;\n case 'trex':\n {\n slice.skip(4); // Version + flags\n const trackId = readU32Be(slice);\n const defaultSampleDescriptionIndex = readU32Be(slice);\n const defaultSampleDuration = readU32Be(slice);\n const defaultSampleSize = readU32Be(slice);\n const defaultSampleFlags = readU32Be(slice);\n // We store these separately rather than in the tracks since the tracks may not exist yet\n this.fragmentTrackDefaults.push({\n trackId,\n defaultSampleDescriptionIndex,\n defaultSampleDuration,\n defaultSampleSize,\n defaultSampleFlags,\n });\n }\n ;\n break;\n case 'tfra':\n {\n const version = readU8(slice);\n slice.skip(3); // Flags\n const trackId = readU32Be(slice);\n const track = this.tracks.find(x => x.id === trackId);\n if (!track) {\n break;\n }\n const word = readU32Be(slice);\n const lengthSizeOfTrafNum = (word & 0b110000) >> 4;\n const lengthSizeOfTrunNum = (word & 0b001100) >> 2;\n const lengthSizeOfSampleNum = word & 0b000011;\n 
const functions = [readU8, readU16Be, readU24Be, readU32Be];\n const readTrafNum = functions[lengthSizeOfTrafNum];\n const readTrunNum = functions[lengthSizeOfTrunNum];\n const readSampleNum = functions[lengthSizeOfSampleNum];\n const numberOfEntries = readU32Be(slice);\n for (let i = 0; i < numberOfEntries; i++) {\n const time = version === 1 ? readU64Be(slice) : readU32Be(slice);\n const moofOffset = version === 1 ? readU64Be(slice) : readU32Be(slice);\n readTrafNum(slice);\n readTrunNum(slice);\n readSampleNum(slice);\n track.fragmentLookupTable.push({\n timestamp: time,\n moofOffset,\n });\n }\n // Sort by timestamp in case it's not naturally sorted\n track.fragmentLookupTable.sort((a, b) => a.timestamp - b.timestamp);\n // Remove multiple entries for the same time\n for (let i = 0; i < track.fragmentLookupTable.length - 1; i++) {\n const entry1 = track.fragmentLookupTable[i];\n const entry2 = track.fragmentLookupTable[i + 1];\n if (entry1.timestamp === entry2.timestamp) {\n track.fragmentLookupTable.splice(i + 1, 1);\n i--;\n }\n }\n }\n ;\n break;\n case 'moof':\n {\n this.currentFragment = {\n moofOffset: startPos,\n moofSize: boxInfo.totalSize,\n implicitBaseDataOffset: startPos,\n trackData: new Map(),\n };\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n this.lastReadFragment = this.currentFragment;\n this.currentFragment = null;\n }\n ;\n break;\n case 'traf':\n {\n assert(this.currentFragment);\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n // It is possible that there is no current track, for example when we don't care about the track\n // referenced in the track fragment header.\n if (this.currentTrack) {\n const trackData = this.currentFragment.trackData.get(this.currentTrack.id);\n if (trackData) {\n const { currentFragmentState } = this.currentTrack;\n assert(currentFragmentState);\n if (currentFragmentState.startTimestamp !== null) {\n offsetFragmentTrackDataByTimestamp(trackData, 
currentFragmentState.startTimestamp);\n trackData.startTimestampIsFinal = true;\n }\n }\n this.currentTrack.currentFragmentState = null;\n this.currentTrack = null;\n }\n }\n ;\n break;\n case 'tfhd':\n {\n assert(this.currentFragment);\n slice.skip(1); // Version\n const flags = readU24Be(slice);\n const baseDataOffsetPresent = Boolean(flags & 0x000001);\n const sampleDescriptionIndexPresent = Boolean(flags & 0x000002);\n const defaultSampleDurationPresent = Boolean(flags & 0x000008);\n const defaultSampleSizePresent = Boolean(flags & 0x000010);\n const defaultSampleFlagsPresent = Boolean(flags & 0x000020);\n const durationIsEmpty = Boolean(flags & 0x010000);\n const defaultBaseIsMoof = Boolean(flags & 0x020000);\n const trackId = readU32Be(slice);\n const track = this.tracks.find(x => x.id === trackId);\n if (!track) {\n // We don't care about this track\n break;\n }\n const defaults = this.fragmentTrackDefaults.find(x => x.trackId === trackId);\n this.currentTrack = track;\n track.currentFragmentState = {\n baseDataOffset: this.currentFragment.implicitBaseDataOffset,\n sampleDescriptionIndex: defaults?.defaultSampleDescriptionIndex ?? null,\n defaultSampleDuration: defaults?.defaultSampleDuration ?? null,\n defaultSampleSize: defaults?.defaultSampleSize ?? null,\n defaultSampleFlags: defaults?.defaultSampleFlags ?? 
null,\n startTimestamp: null,\n };\n if (baseDataOffsetPresent) {\n track.currentFragmentState.baseDataOffset = readU64Be(slice);\n }\n else if (defaultBaseIsMoof) {\n track.currentFragmentState.baseDataOffset = this.currentFragment.moofOffset;\n }\n if (sampleDescriptionIndexPresent) {\n track.currentFragmentState.sampleDescriptionIndex = readU32Be(slice);\n }\n if (defaultSampleDurationPresent) {\n track.currentFragmentState.defaultSampleDuration = readU32Be(slice);\n }\n if (defaultSampleSizePresent) {\n track.currentFragmentState.defaultSampleSize = readU32Be(slice);\n }\n if (defaultSampleFlagsPresent) {\n track.currentFragmentState.defaultSampleFlags = readU32Be(slice);\n }\n if (durationIsEmpty) {\n track.currentFragmentState.defaultSampleDuration = 0;\n }\n }\n ;\n break;\n case 'tfdt':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(track.currentFragmentState);\n const version = readU8(slice);\n slice.skip(3); // Flags\n const baseMediaDecodeTime = version === 0 ? readU32Be(slice) : readU64Be(slice);\n track.currentFragmentState.startTimestamp = baseMediaDecodeTime;\n }\n ;\n break;\n case 'trun':\n {\n const track = this.currentTrack;\n if (!track) {\n break;\n }\n assert(this.currentFragment);\n assert(track.currentFragmentState);\n if (this.currentFragment.trackData.has(track.id)) {\n console.warn('Can\\'t have two trun boxes for the same track in one fragment. 
Ignoring...');\n break;\n }\n const version = readU8(slice);\n const flags = readU24Be(slice);\n const dataOffsetPresent = Boolean(flags & 0x000001);\n const firstSampleFlagsPresent = Boolean(flags & 0x000004);\n const sampleDurationPresent = Boolean(flags & 0x000100);\n const sampleSizePresent = Boolean(flags & 0x000200);\n const sampleFlagsPresent = Boolean(flags & 0x000400);\n const sampleCompositionTimeOffsetsPresent = Boolean(flags & 0x000800);\n const sampleCount = readU32Be(slice);\n let dataOffset = track.currentFragmentState.baseDataOffset;\n if (dataOffsetPresent) {\n dataOffset += readI32Be(slice);\n }\n let firstSampleFlags = null;\n if (firstSampleFlagsPresent) {\n firstSampleFlags = readU32Be(slice);\n }\n let currentOffset = dataOffset;\n if (sampleCount === 0) {\n // Don't associate the fragment with the track if it has no samples, this simplifies other code\n this.currentFragment.implicitBaseDataOffset = currentOffset;\n break;\n }\n let currentTimestamp = 0;\n const trackData = {\n track,\n startTimestamp: 0,\n endTimestamp: 0,\n firstKeyFrameTimestamp: null,\n samples: [],\n presentationTimestamps: [],\n startTimestampIsFinal: false,\n };\n this.currentFragment.trackData.set(track.id, trackData);\n for (let i = 0; i < sampleCount; i++) {\n let sampleDuration;\n if (sampleDurationPresent) {\n sampleDuration = readU32Be(slice);\n }\n else {\n assert(track.currentFragmentState.defaultSampleDuration !== null);\n sampleDuration = track.currentFragmentState.defaultSampleDuration;\n }\n let sampleSize;\n if (sampleSizePresent) {\n sampleSize = readU32Be(slice);\n }\n else {\n assert(track.currentFragmentState.defaultSampleSize !== null);\n sampleSize = track.currentFragmentState.defaultSampleSize;\n }\n let sampleFlags;\n if (sampleFlagsPresent) {\n sampleFlags = readU32Be(slice);\n }\n else {\n assert(track.currentFragmentState.defaultSampleFlags !== null);\n sampleFlags = track.currentFragmentState.defaultSampleFlags;\n }\n if (i === 0 && 
firstSampleFlags !== null) {\n sampleFlags = firstSampleFlags;\n }\n let sampleCompositionTimeOffset = 0;\n if (sampleCompositionTimeOffsetsPresent) {\n if (version === 0) {\n sampleCompositionTimeOffset = readU32Be(slice);\n }\n else {\n sampleCompositionTimeOffset = readI32Be(slice);\n }\n }\n const isKeyFrame = !(sampleFlags & 0x00010000);\n trackData.samples.push({\n presentationTimestamp: currentTimestamp + sampleCompositionTimeOffset,\n duration: sampleDuration,\n byteOffset: currentOffset,\n byteSize: sampleSize,\n isKeyFrame,\n });\n currentOffset += sampleSize;\n currentTimestamp += sampleDuration;\n }\n trackData.presentationTimestamps = trackData.samples\n .map((x, i) => ({ presentationTimestamp: x.presentationTimestamp, sampleIndex: i }))\n .sort((a, b) => a.presentationTimestamp - b.presentationTimestamp);\n for (let i = 0; i < trackData.presentationTimestamps.length; i++) {\n const currentEntry = trackData.presentationTimestamps[i];\n const currentSample = trackData.samples[currentEntry.sampleIndex];\n if (trackData.firstKeyFrameTimestamp === null && currentSample.isKeyFrame) {\n trackData.firstKeyFrameTimestamp = currentSample.presentationTimestamp;\n }\n if (i < trackData.presentationTimestamps.length - 1) {\n // Update sample durations based on presentation order\n const nextEntry = trackData.presentationTimestamps[i + 1];\n currentSample.duration = nextEntry.presentationTimestamp - currentEntry.presentationTimestamp;\n }\n }\n const firstSample = trackData.samples[trackData.presentationTimestamps[0].sampleIndex];\n const lastSample = trackData.samples[last(trackData.presentationTimestamps).sampleIndex];\n trackData.startTimestamp = firstSample.presentationTimestamp;\n trackData.endTimestamp = lastSample.presentationTimestamp + lastSample.duration;\n this.currentFragment.implicitBaseDataOffset = currentOffset;\n }\n ;\n break;\n // Metadata section\n // https://exiftool.org/TagNames/QuickTime.html\n // https://mp4workshop.com/about\n case 'udta':\n 
{ // Contains either movie metadata or track metadata\n const iterator = this.iterateContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n for (const { boxInfo, slice } of iterator) {\n if (boxInfo.name !== 'meta' && !this.currentTrack) {\n const startPos = slice.filePos;\n this.metadataTags.raw ??= {};\n if (boxInfo.name[0] === '©') {\n // https://mp4workshop.com/about\n // Box name starting with © indicates \"international text\"\n this.metadataTags.raw[boxInfo.name] ??= readMetadataStringShort(slice);\n }\n else {\n this.metadataTags.raw[boxInfo.name] ??= readBytes(slice, boxInfo.contentSize);\n }\n slice.filePos = startPos;\n }\n switch (boxInfo.name) {\n case 'meta':\n {\n slice.skip(-boxInfo.headerSize);\n this.traverseBox(slice);\n }\n ;\n break;\n case '©nam':\n case 'name':\n {\n if (this.currentTrack) {\n this.currentTrack.name = textDecoder.decode(readBytes(slice, boxInfo.contentSize));\n }\n else {\n this.metadataTags.title ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©des':\n {\n if (!this.currentTrack) {\n this.metadataTags.description ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©ART':\n {\n if (!this.currentTrack) {\n this.metadataTags.artist ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©alb':\n {\n if (!this.currentTrack) {\n this.metadataTags.album ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case 'albr':\n {\n if (!this.currentTrack) {\n this.metadataTags.albumArtist ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©gen':\n {\n if (!this.currentTrack) {\n this.metadataTags.genre ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©day':\n {\n if (!this.currentTrack) {\n const date = new Date(readMetadataStringShort(slice));\n if (!Number.isNaN(date.getTime())) {\n this.metadataTags.date ??= date;\n }\n }\n }\n ;\n break;\n case '©cmt':\n {\n if (!this.currentTrack) {\n this.metadataTags.comment ??= 
readMetadataStringShort(slice);\n }\n }\n ;\n break;\n case '©lyr':\n {\n if (!this.currentTrack) {\n this.metadataTags.lyrics ??= readMetadataStringShort(slice);\n }\n }\n ;\n break;\n }\n }\n }\n ;\n break;\n case 'meta':\n {\n if (this.currentTrack) {\n break; // Only care about movie-level metadata for now\n }\n // The 'meta' box comes in two flavors, one with flags/version and one without. To know which is which,\n // let's read the next 4 bytes, which are either the version or the size of the first subbox.\n const word = readU32Be(slice);\n const isQuickTime = word !== 0;\n this.currentMetadataKeys = new Map();\n if (isQuickTime) {\n this.readContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n }\n else {\n this.readContiguousBoxes(slice.slice(contentStartPos + 4, boxInfo.contentSize - 4));\n }\n this.currentMetadataKeys = null;\n }\n ;\n break;\n case 'keys':\n {\n if (!this.currentMetadataKeys) {\n break;\n }\n slice.skip(4); // Version + flags\n const entryCount = readU32Be(slice);\n for (let i = 0; i < entryCount; i++) {\n const keySize = readU32Be(slice);\n slice.skip(4); // Key namespace\n const keyName = textDecoder.decode(readBytes(slice, keySize - 8));\n this.currentMetadataKeys.set(i + 1, keyName);\n }\n }\n ;\n break;\n case 'ilst':\n {\n if (!this.currentMetadataKeys) {\n break;\n }\n const iterator = this.iterateContiguousBoxes(slice.slice(contentStartPos, boxInfo.contentSize));\n for (const { boxInfo, slice } of iterator) {\n let metadataKey = boxInfo.name;\n // Interpret the box name as a u32be\n const nameAsNumber = (metadataKey.charCodeAt(0) << 24)\n + (metadataKey.charCodeAt(1) << 16)\n + (metadataKey.charCodeAt(2) << 8)\n + metadataKey.charCodeAt(3);\n if (this.currentMetadataKeys.has(nameAsNumber)) {\n // An entry exists for this number\n metadataKey = this.currentMetadataKeys.get(nameAsNumber);\n }\n const data = readDataBox(slice);\n this.metadataTags.raw ??= {};\n this.metadataTags.raw[metadataKey] ??= data;\n switch 
(metadataKey) {\n case '©nam':\n case 'titl':\n case 'com.apple.quicktime.title':\n case 'title':\n {\n if (typeof data === 'string') {\n this.metadataTags.title ??= data;\n }\n }\n ;\n break;\n case '©des':\n case 'desc':\n case 'dscp':\n case 'com.apple.quicktime.description':\n case 'description':\n {\n if (typeof data === 'string') {\n this.metadataTags.description ??= data;\n }\n }\n ;\n break;\n case '©ART':\n case 'com.apple.quicktime.artist':\n case 'artist':\n {\n if (typeof data === 'string') {\n this.metadataTags.artist ??= data;\n }\n }\n ;\n break;\n case '©alb':\n case 'albm':\n case 'com.apple.quicktime.album':\n case 'album':\n {\n if (typeof data === 'string') {\n this.metadataTags.album ??= data;\n }\n }\n ;\n break;\n case 'aART':\n case 'album_artist':\n {\n if (typeof data === 'string') {\n this.metadataTags.albumArtist ??= data;\n }\n }\n ;\n break;\n case '©cmt':\n case 'com.apple.quicktime.comment':\n case 'comment':\n {\n if (typeof data === 'string') {\n this.metadataTags.comment ??= data;\n }\n }\n ;\n break;\n case '©gen':\n case 'gnre':\n case 'com.apple.quicktime.genre':\n case 'genre':\n {\n if (typeof data === 'string') {\n this.metadataTags.genre ??= data;\n }\n }\n ;\n break;\n case '©lyr':\n case 'lyrics':\n {\n if (typeof data === 'string') {\n this.metadataTags.lyrics ??= data;\n }\n }\n ;\n break;\n case '©day':\n case 'rldt':\n case 'com.apple.quicktime.creationdate':\n case 'date':\n {\n if (typeof data === 'string') {\n const date = new Date(data);\n if (!Number.isNaN(date.getTime())) {\n this.metadataTags.date ??= date;\n }\n }\n }\n ;\n break;\n case 'covr':\n case 'com.apple.quicktime.artwork':\n {\n if (data instanceof RichImageData) {\n this.metadataTags.images ??= [];\n this.metadataTags.images.push({\n data: data.data,\n kind: 'coverFront',\n mimeType: data.mimeType,\n });\n }\n else if (data instanceof Uint8Array) {\n this.metadataTags.images ??= [];\n this.metadataTags.images.push({\n data,\n kind: 'coverFront',\n 
mimeType: 'image/*',\n });\n }\n }\n ;\n break;\n case 'track':\n {\n if (typeof data === 'string') {\n const parts = data.split('/');\n const trackNum = Number.parseInt(parts[0], 10);\n const tracksTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(trackNum) && trackNum > 0) {\n this.metadataTags.trackNumber ??= trackNum;\n }\n if (tracksTotal && Number.isInteger(tracksTotal) && tracksTotal > 0) {\n this.metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n }\n ;\n break;\n case 'trkn':\n {\n if (data instanceof Uint8Array && data.length >= 6) {\n const view = toDataView(data);\n const trackNumber = view.getUint16(2, false);\n const tracksTotal = view.getUint16(4, false);\n if (trackNumber > 0) {\n this.metadataTags.trackNumber ??= trackNumber;\n }\n if (tracksTotal > 0) {\n this.metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n }\n ;\n break;\n case 'disc':\n case 'disk':\n {\n if (data instanceof Uint8Array && data.length >= 6) {\n const view = toDataView(data);\n const discNumber = view.getUint16(2, false);\n const discNumberMax = view.getUint16(4, false);\n if (discNumber > 0) {\n this.metadataTags.discNumber ??= discNumber;\n }\n if (discNumberMax > 0) {\n this.metadataTags.discsTotal ??= discNumberMax;\n }\n }\n }\n ;\n break;\n }\n }\n }\n ;\n break;\n }\n slice.filePos = boxEndPos;\n return true;\n }\n}\nclass IsobmffTrackBacking {\n constructor(internalTrack) {\n this.internalTrack = internalTrack;\n this.packetToSampleIndex = new WeakMap();\n this.packetToFragmentLocation = new WeakMap();\n }\n getId() {\n return this.internalTrack.id;\n }\n getNumber() {\n const demuxer = this.internalTrack.demuxer;\n const inputTrack = this.internalTrack.inputTrack;\n const trackType = inputTrack.type;\n let number = 0;\n for (const track of demuxer.tracks) {\n if (track.inputTrack.type === trackType) {\n number++;\n }\n if (track === this.internalTrack) {\n break;\n }\n }\n return number;\n }\n getCodec() {\n throw new Error('Not implemented on 
base class.');\n }\n getInternalCodecId() {\n return this.internalTrack.internalCodecId;\n }\n getName() {\n return this.internalTrack.name;\n }\n getLanguageCode() {\n return this.internalTrack.languageCode;\n }\n getTimeResolution() {\n return this.internalTrack.timescale;\n }\n getDisposition() {\n return this.internalTrack.disposition;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n async getFirstTimestamp() {\n const firstPacket = await this.getFirstPacket({ metadataOnly: true });\n return firstPacket?.timestamp ?? 0;\n }\n async getFirstPacket(options) {\n const regularPacket = await this.fetchPacketForSampleIndex(0, options);\n if (regularPacket || !this.internalTrack.demuxer.isFragmented) {\n // If there's a non-fragmented packet, always prefer that\n return regularPacket;\n }\n return this.performFragmentedLookup(null, (fragment) => {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (trackData) {\n return {\n sampleIndex: 0,\n correctSampleFound: true,\n };\n }\n return {\n sampleIndex: -1,\n correctSampleFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the lookup entries\n Infinity, options);\n }\n mapTimestampIntoTimescale(timestamp) {\n // Do a little rounding to catch cases where the result is very close to an integer. If it is, it's likely\n // that the number was originally an integer divided by the timescale. 
For stability, it's best\n // to return the integer in this case.\n return roundIfAlmostInteger(timestamp * this.internalTrack.timescale) + this.internalTrack.editListOffset;\n }\n async getPacket(timestamp, options) {\n const timestampInTimescale = this.mapTimestampIntoTimescale(timestamp);\n const sampleTable = this.internalTrack.demuxer.getSampleTableForTrack(this.internalTrack);\n const sampleIndex = getSampleIndexForTimestamp(sampleTable, timestampInTimescale);\n const regularPacket = await this.fetchPacketForSampleIndex(sampleIndex, options);\n if (!sampleTableIsEmpty(sampleTable) || !this.internalTrack.demuxer.isFragmented) {\n // Prefer the non-fragmented packet\n return regularPacket;\n }\n return this.performFragmentedLookup(null, (fragment) => {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (!trackData) {\n return { sampleIndex: -1, correctSampleFound: false };\n }\n const index = binarySearchLessOrEqual(trackData.presentationTimestamps, timestampInTimescale, x => x.presentationTimestamp);\n const sampleIndex = index !== -1 ? 
trackData.presentationTimestamps[index].sampleIndex : -1;\n const correctSampleFound = index !== -1 && timestampInTimescale < trackData.endTimestamp;\n return { sampleIndex, correctSampleFound };\n }, timestampInTimescale, timestampInTimescale, options);\n }\n async getNextPacket(packet, options) {\n const regularSampleIndex = this.packetToSampleIndex.get(packet);\n if (regularSampleIndex !== undefined) {\n // Prefer the non-fragmented packet\n return this.fetchPacketForSampleIndex(regularSampleIndex + 1, options);\n }\n const locationInFragment = this.packetToFragmentLocation.get(packet);\n if (locationInFragment === undefined) {\n throw new Error('Packet was not created from this track.');\n }\n return this.performFragmentedLookup(locationInFragment.fragment, (fragment) => {\n if (fragment === locationInFragment.fragment) {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (locationInFragment.sampleIndex + 1 < trackData.samples.length) {\n // We can simply take the next sample in the fragment\n return {\n sampleIndex: locationInFragment.sampleIndex + 1,\n correctSampleFound: true,\n };\n }\n }\n else {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (trackData) {\n return {\n sampleIndex: 0,\n correctSampleFound: true,\n };\n }\n }\n return {\n sampleIndex: -1,\n correctSampleFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the lookup entries\n Infinity, options);\n }\n async getKeyPacket(timestamp, options) {\n const timestampInTimescale = this.mapTimestampIntoTimescale(timestamp);\n const sampleTable = this.internalTrack.demuxer.getSampleTableForTrack(this.internalTrack);\n const sampleIndex = getKeyframeSampleIndexForTimestamp(sampleTable, timestampInTimescale);\n const regularPacket = await this.fetchPacketForSampleIndex(sampleIndex, options);\n if (!sampleTableIsEmpty(sampleTable) || !this.internalTrack.demuxer.isFragmented) {\n // Prefer the non-fragmented packet\n return 
regularPacket;\n }\n return this.performFragmentedLookup(null, (fragment) => {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (!trackData) {\n return { sampleIndex: -1, correctSampleFound: false };\n }\n const index = findLastIndex(trackData.presentationTimestamps, (x) => {\n const sample = trackData.samples[x.sampleIndex];\n return sample.isKeyFrame && x.presentationTimestamp <= timestampInTimescale;\n });\n const sampleIndex = index !== -1 ? trackData.presentationTimestamps[index].sampleIndex : -1;\n const correctSampleFound = index !== -1 && timestampInTimescale < trackData.endTimestamp;\n return { sampleIndex, correctSampleFound };\n }, timestampInTimescale, timestampInTimescale, options);\n }\n async getNextKeyPacket(packet, options) {\n const regularSampleIndex = this.packetToSampleIndex.get(packet);\n if (regularSampleIndex !== undefined) {\n // Prefer the non-fragmented packet\n const sampleTable = this.internalTrack.demuxer.getSampleTableForTrack(this.internalTrack);\n const nextKeyFrameSampleIndex = getNextKeyframeIndexForSample(sampleTable, regularSampleIndex);\n return this.fetchPacketForSampleIndex(nextKeyFrameSampleIndex, options);\n }\n const locationInFragment = this.packetToFragmentLocation.get(packet);\n if (locationInFragment === undefined) {\n throw new Error('Packet was not created from this track.');\n }\n return this.performFragmentedLookup(locationInFragment.fragment, (fragment) => {\n if (fragment === locationInFragment.fragment) {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n const nextKeyFrameIndex = trackData.samples.findIndex((x, i) => x.isKeyFrame && i > locationInFragment.sampleIndex);\n if (nextKeyFrameIndex !== -1) {\n // We can simply take the next key frame in the fragment\n return {\n sampleIndex: nextKeyFrameIndex,\n correctSampleFound: true,\n };\n }\n }\n else {\n const trackData = fragment.trackData.get(this.internalTrack.id);\n if (trackData && trackData.firstKeyFrameTimestamp 
!== null) {\n const keyFrameIndex = trackData.samples.findIndex(x => x.isKeyFrame);\n assert(keyFrameIndex !== -1); // There must be one\n return {\n sampleIndex: keyFrameIndex,\n correctSampleFound: true,\n };\n }\n }\n return {\n sampleIndex: -1,\n correctSampleFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the lookup entries\n Infinity, options);\n }\n async fetchPacketForSampleIndex(sampleIndex, options) {\n if (sampleIndex === -1) {\n return null;\n }\n const sampleTable = this.internalTrack.demuxer.getSampleTableForTrack(this.internalTrack);\n const sampleInfo = getSampleInfo(sampleTable, sampleIndex);\n if (!sampleInfo) {\n return null;\n }\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.internalTrack.demuxer.reader.requestSlice(sampleInfo.sampleOffset, sampleInfo.sampleSize);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n data = readBytes(slice, sampleInfo.sampleSize);\n }\n const timestamp = (sampleInfo.presentationTimestamp - this.internalTrack.editListOffset)\n / this.internalTrack.timescale;\n const duration = sampleInfo.duration / this.internalTrack.timescale;\n const packet = new EncodedPacket(data, sampleInfo.isKeyFrame ? 
'key' : 'delta', timestamp, duration, sampleIndex, sampleInfo.sampleSize);\n this.packetToSampleIndex.set(packet, sampleIndex);\n return packet;\n }\n async fetchPacketInFragment(fragment, sampleIndex, options) {\n if (sampleIndex === -1) {\n return null;\n }\n const trackData = fragment.trackData.get(this.internalTrack.id);\n const fragmentSample = trackData.samples[sampleIndex];\n assert(fragmentSample);\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.internalTrack.demuxer.reader.requestSlice(fragmentSample.byteOffset, fragmentSample.byteSize);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n data = readBytes(slice, fragmentSample.byteSize);\n }\n const timestamp = (fragmentSample.presentationTimestamp - this.internalTrack.editListOffset)\n / this.internalTrack.timescale;\n const duration = fragmentSample.duration / this.internalTrack.timescale;\n const packet = new EncodedPacket(data, fragmentSample.isKeyFrame ? 'key' : 'delta', timestamp, duration, fragment.moofOffset + sampleIndex, fragmentSample.byteSize);\n this.packetToFragmentLocation.set(packet, { fragment, sampleIndex });\n return packet;\n }\n /** Looks for a packet in the fragments while trying to load as few fragments as possible to retrieve it. 
*/\n async performFragmentedLookup(\n // The fragment where we start looking\n startFragment, \n // This function returns the best-matching sample in a given fragment\n getMatchInFragment, \n // The timestamp with which we can search the lookup table\n searchTimestamp, \n // The timestamp for which we know the correct sample will not come after it\n latestTimestamp, options) {\n const demuxer = this.internalTrack.demuxer;\n let currentFragment = null;\n let bestFragment = null;\n let bestSampleIndex = -1;\n if (startFragment) {\n const { sampleIndex, correctSampleFound } = getMatchInFragment(startFragment);\n if (correctSampleFound) {\n return this.fetchPacketInFragment(startFragment, sampleIndex, options);\n }\n if (sampleIndex !== -1) {\n bestFragment = startFragment;\n bestSampleIndex = sampleIndex;\n }\n }\n // Search for a lookup entry; this way, we won't need to start searching from the start of the file\n // but can jump right into the correct fragment (or at least nearby).\n const lookupEntryIndex = binarySearchLessOrEqual(this.internalTrack.fragmentLookupTable, searchTimestamp, x => x.timestamp);\n const lookupEntry = lookupEntryIndex !== -1\n ? this.internalTrack.fragmentLookupTable[lookupEntryIndex]\n : null;\n const positionCacheIndex = binarySearchLessOrEqual(this.internalTrack.fragmentPositionCache, searchTimestamp, x => x.startTimestamp);\n const positionCacheEntry = positionCacheIndex !== -1\n ? this.internalTrack.fragmentPositionCache[positionCacheIndex]\n : null;\n const lookupEntryPosition = Math.max(lookupEntry?.moofOffset ?? 0, positionCacheEntry?.moofOffset ?? 0) || null;\n let currentPos;\n if (!startFragment) {\n currentPos = lookupEntryPosition ?? 
0;\n }\n else {\n if (lookupEntryPosition === null || startFragment.moofOffset >= lookupEntryPosition) {\n currentPos = startFragment.moofOffset + startFragment.moofSize;\n currentFragment = startFragment;\n }\n else {\n // Use the lookup entry\n currentPos = lookupEntryPosition;\n }\n }\n while (true) {\n if (currentFragment) {\n const trackData = currentFragment.trackData.get(this.internalTrack.id);\n if (trackData && trackData.startTimestamp > latestTimestamp) {\n // We're already past the upper bound, no need to keep searching\n break;\n }\n }\n // Load the header\n let slice = demuxer.reader.requestSliceRange(currentPos, MIN_BOX_HEADER_SIZE, MAX_BOX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const boxStartPos = currentPos;\n const boxInfo = readBoxHeader(slice);\n if (!boxInfo) {\n break;\n }\n if (boxInfo.name === 'moof') {\n currentFragment = await demuxer.readFragment(boxStartPos);\n const { sampleIndex, correctSampleFound } = getMatchInFragment(currentFragment);\n if (correctSampleFound) {\n return this.fetchPacketInFragment(currentFragment, sampleIndex, options);\n }\n if (sampleIndex !== -1) {\n bestFragment = currentFragment;\n bestSampleIndex = sampleIndex;\n }\n }\n currentPos = boxStartPos + boxInfo.totalSize;\n }\n // Catch faulty lookup table entries\n if (lookupEntry && (!bestFragment || bestFragment.moofOffset < lookupEntry.moofOffset)) {\n // The lookup table entry lied to us! We found a lookup entry but no fragment there that satisfied\n // the match. In this case, let's search again but using the lookup entry before that.\n const previousLookupEntry = this.internalTrack.fragmentLookupTable[lookupEntryIndex - 1];\n assert(!previousLookupEntry || previousLookupEntry.timestamp < lookupEntry.timestamp);\n const newSearchTimestamp = previousLookupEntry?.timestamp ?? 
-Infinity;\n return this.performFragmentedLookup(null, getMatchInFragment, newSearchTimestamp, latestTimestamp, options);\n }\n if (bestFragment) {\n // If we finished looping but didn't find a perfect match, still return the best match we found\n return this.fetchPacketInFragment(bestFragment, bestSampleIndex, options);\n }\n return null;\n }\n}\nclass IsobmffVideoTrackBacking extends IsobmffTrackBacking {\n constructor(internalTrack) {\n super(internalTrack);\n this.decoderConfigPromise = null;\n this.internalTrack = internalTrack;\n }\n getCodec() {\n return this.internalTrack.info.codec;\n }\n getCodedWidth() {\n return this.internalTrack.info.width;\n }\n getCodedHeight() {\n return this.internalTrack.info.height;\n }\n getSquarePixelWidth() {\n return this.internalTrack.info.squarePixelWidth;\n }\n getSquarePixelHeight() {\n return this.internalTrack.info.squarePixelHeight;\n }\n getRotation() {\n return this.internalTrack.rotation;\n }\n async getColorSpace() {\n return {\n primaries: this.internalTrack.info.colorSpace?.primaries,\n transfer: this.internalTrack.info.colorSpace?.transfer,\n matrix: this.internalTrack.info.colorSpace?.matrix,\n fullRange: this.internalTrack.info.colorSpace?.fullRange,\n };\n }\n async canBeTransparent() {\n return false;\n }\n async getDecoderConfig() {\n if (!this.internalTrack.info.codec) {\n return null;\n }\n return this.decoderConfigPromise ??= (async () => {\n if (this.internalTrack.info.codec === 'vp9' && !this.internalTrack.info.vp9CodecInfo) {\n const firstPacket = await this.getFirstPacket({});\n this.internalTrack.info.vp9CodecInfo = firstPacket && extractVp9CodecInfoFromPacket(firstPacket.data);\n }\n else if (this.internalTrack.info.codec === 'av1' && !this.internalTrack.info.av1CodecInfo) {\n const firstPacket = await this.getFirstPacket({});\n this.internalTrack.info.av1CodecInfo = firstPacket && extractAv1CodecInfoFromPacket(firstPacket.data);\n }\n return {\n codec: 
extractVideoCodecString(this.internalTrack.info),\n codedWidth: this.internalTrack.info.width,\n codedHeight: this.internalTrack.info.height,\n displayAspectWidth: this.internalTrack.info.squarePixelWidth,\n displayAspectHeight: this.internalTrack.info.squarePixelHeight,\n description: this.internalTrack.info.codecDescription ?? undefined,\n colorSpace: this.internalTrack.info.colorSpace ?? undefined,\n };\n })();\n }\n}\nclass IsobmffAudioTrackBacking extends IsobmffTrackBacking {\n constructor(internalTrack) {\n super(internalTrack);\n this.decoderConfig = null;\n this.internalTrack = internalTrack;\n }\n getCodec() {\n return this.internalTrack.info.codec;\n }\n getNumberOfChannels() {\n return this.internalTrack.info.numberOfChannels;\n }\n getSampleRate() {\n return this.internalTrack.info.sampleRate;\n }\n async getDecoderConfig() {\n if (!this.internalTrack.info.codec) {\n return null;\n }\n return this.decoderConfig ??= {\n codec: extractAudioCodecString(this.internalTrack.info),\n numberOfChannels: this.internalTrack.info.numberOfChannels,\n sampleRate: this.internalTrack.info.sampleRate,\n description: this.internalTrack.info.codecDescription ?? 
undefined,\n };\n }\n}\nconst getSampleIndexForTimestamp = (sampleTable, timescaleUnits) => {\n if (sampleTable.presentationTimestamps) {\n const index = binarySearchLessOrEqual(sampleTable.presentationTimestamps, timescaleUnits, x => x.presentationTimestamp);\n if (index === -1) {\n return -1;\n }\n return sampleTable.presentationTimestamps[index].sampleIndex;\n }\n else {\n const index = binarySearchLessOrEqual(sampleTable.sampleTimingEntries, timescaleUnits, x => x.startDecodeTimestamp);\n if (index === -1) {\n return -1;\n }\n const entry = sampleTable.sampleTimingEntries[index];\n return entry.startIndex\n + Math.min(Math.floor((timescaleUnits - entry.startDecodeTimestamp) / entry.delta), entry.count - 1);\n }\n};\nconst getKeyframeSampleIndexForTimestamp = (sampleTable, timescaleUnits) => {\n if (!sampleTable.keySampleIndices) {\n // Every sample is a keyframe\n return getSampleIndexForTimestamp(sampleTable, timescaleUnits);\n }\n if (sampleTable.presentationTimestamps) {\n const index = binarySearchLessOrEqual(sampleTable.presentationTimestamps, timescaleUnits, x => x.presentationTimestamp);\n if (index === -1) {\n return -1;\n }\n // Walk the samples in presentation order until we find one that's a keyframe\n for (let i = index; i >= 0; i--) {\n const sampleIndex = sampleTable.presentationTimestamps[i].sampleIndex;\n const isKeyFrame = binarySearchExact(sampleTable.keySampleIndices, sampleIndex, x => x) !== -1;\n if (isKeyFrame) {\n return sampleIndex;\n }\n }\n return -1;\n }\n else {\n const sampleIndex = getSampleIndexForTimestamp(sampleTable, timescaleUnits);\n const index = binarySearchLessOrEqual(sampleTable.keySampleIndices, sampleIndex, x => x);\n return sampleTable.keySampleIndices[index] ?? 
-1;\n }\n};\nconst getSampleInfo = (sampleTable, sampleIndex) => {\n const timingEntryIndex = binarySearchLessOrEqual(sampleTable.sampleTimingEntries, sampleIndex, x => x.startIndex);\n const timingEntry = sampleTable.sampleTimingEntries[timingEntryIndex];\n if (!timingEntry || timingEntry.startIndex + timingEntry.count <= sampleIndex) {\n return null;\n }\n const decodeTimestamp = timingEntry.startDecodeTimestamp\n + (sampleIndex - timingEntry.startIndex) * timingEntry.delta;\n let presentationTimestamp = decodeTimestamp;\n const offsetEntryIndex = binarySearchLessOrEqual(sampleTable.sampleCompositionTimeOffsets, sampleIndex, x => x.startIndex);\n const offsetEntry = sampleTable.sampleCompositionTimeOffsets[offsetEntryIndex];\n if (offsetEntry && sampleIndex - offsetEntry.startIndex < offsetEntry.count) {\n presentationTimestamp += offsetEntry.offset;\n }\n const sampleSize = sampleTable.sampleSizes[Math.min(sampleIndex, sampleTable.sampleSizes.length - 1)];\n const chunkEntryIndex = binarySearchLessOrEqual(sampleTable.sampleToChunk, sampleIndex, x => x.startSampleIndex);\n const chunkEntry = sampleTable.sampleToChunk[chunkEntryIndex];\n assert(chunkEntry);\n const chunkIndex = chunkEntry.startChunkIndex\n + Math.floor((sampleIndex - chunkEntry.startSampleIndex) / chunkEntry.samplesPerChunk);\n const chunkOffset = sampleTable.chunkOffsets[chunkIndex];\n const startSampleIndexOfChunk = chunkEntry.startSampleIndex\n + (chunkIndex - chunkEntry.startChunkIndex) * chunkEntry.samplesPerChunk;\n let chunkSize = 0;\n let sampleOffset = chunkOffset;\n if (sampleTable.sampleSizes.length === 1) {\n sampleOffset += sampleSize * (sampleIndex - startSampleIndexOfChunk);\n chunkSize += sampleSize * chunkEntry.samplesPerChunk;\n }\n else {\n for (let i = startSampleIndexOfChunk; i < startSampleIndexOfChunk + chunkEntry.samplesPerChunk; i++) {\n const sampleSize = sampleTable.sampleSizes[i];\n if (i < sampleIndex) {\n sampleOffset += sampleSize;\n }\n chunkSize += sampleSize;\n 
}\n }\n let duration = timingEntry.delta;\n if (sampleTable.presentationTimestamps) {\n // In order to accurately compute the duration, we need to take the duration to the next sample in presentation\n // order, not in decode order\n const presentationIndex = sampleTable.presentationTimestampIndexMap[sampleIndex];\n assert(presentationIndex !== undefined);\n if (presentationIndex < sampleTable.presentationTimestamps.length - 1) {\n const nextEntry = sampleTable.presentationTimestamps[presentationIndex + 1];\n const nextPresentationTimestamp = nextEntry.presentationTimestamp;\n duration = nextPresentationTimestamp - presentationTimestamp;\n }\n }\n return {\n presentationTimestamp,\n duration,\n sampleOffset,\n sampleSize,\n chunkOffset,\n chunkSize,\n isKeyFrame: sampleTable.keySampleIndices\n ? binarySearchExact(sampleTable.keySampleIndices, sampleIndex, x => x) !== -1\n : true,\n };\n};\nconst getNextKeyframeIndexForSample = (sampleTable, sampleIndex) => {\n if (!sampleTable.keySampleIndices) {\n return sampleIndex + 1;\n }\n const index = binarySearchLessOrEqual(sampleTable.keySampleIndices, sampleIndex, x => x);\n return sampleTable.keySampleIndices[index + 1] ?? -1;\n};\nconst offsetFragmentTrackDataByTimestamp = (trackData, timestamp) => {\n trackData.startTimestamp += timestamp;\n trackData.endTimestamp += timestamp;\n for (const sample of trackData.samples) {\n sample.presentationTimestamp += timestamp;\n }\n for (const entry of trackData.presentationTimestamps) {\n entry.presentationTimestamp += timestamp;\n }\n};\n/** Extracts the rotation component from a transformation matrix, in degrees. 
*/\nconst extractRotationFromMatrix = (matrix) => {\n const [m11, , , m21] = matrix;\n const scaleX = Math.hypot(m11, m21);\n const cosTheta = m11 / scaleX;\n const sinTheta = m21 / scaleX;\n // Invert the rotation because matrices are post-multiplied in ISOBMFF\n const result = -Math.atan2(sinTheta, cosTheta) * (180 / Math.PI);\n if (!Number.isFinite(result)) {\n // Can happen if the entire matrix is 0, for example\n return 0;\n }\n return result;\n};\nconst sampleTableIsEmpty = (sampleTable) => {\n return sampleTable.sampleSizes.length === 0;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { extractAv1CodecInfoFromPacket, extractAvcDecoderConfigurationRecord, extractHevcDecoderConfigurationRecord, extractVp9CodecInfoFromPacket, } from '../codec-data.js';\nimport { extractAudioCodecString, extractVideoCodecString, OPUS_SAMPLE_RATE, } from '../codec.js';\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack, InputVideoTrack, } from '../input-track.js';\nimport { AttachedFile, DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, binarySearchLessOrEqual, COLOR_PRIMARIES_MAP_INVERSE, findLastIndex, isIso639Dash2LanguageCode, last, MATRIX_COEFFICIENTS_MAP_INVERSE, normalizeRotation, roundIfAlmostInteger, TRANSFER_CHARACTERISTICS_MAP_INVERSE, UNDETERMINED_LANGUAGE, } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { assertDefinedSize, CODEC_STRING_MAP, EBMLId, LEVEL_0_AND_1_EBML_IDS, LEVEL_1_EBML_IDS, MAX_HEADER_SIZE, MIN_HEADER_SIZE, readAsciiString, readUnicodeString, readElementHeader, readElementId, readFloat, readUnsignedInt, readVarInt, resync, searchForNextElementId, readUnsignedBigInt, } from './ebml.js';\nimport { buildMatroskaMimeType } from 
'./matroska-misc.js';\nimport { FileSlice, readBytes, readI16Be, readU8 } from '../reader.js';\nvar BlockLacing;\n(function (BlockLacing) {\n BlockLacing[BlockLacing[\"None\"] = 0] = \"None\";\n BlockLacing[BlockLacing[\"Xiph\"] = 1] = \"Xiph\";\n BlockLacing[BlockLacing[\"FixedSize\"] = 2] = \"FixedSize\";\n BlockLacing[BlockLacing[\"Ebml\"] = 3] = \"Ebml\";\n})(BlockLacing || (BlockLacing = {}));\nvar ContentEncodingScope;\n(function (ContentEncodingScope) {\n ContentEncodingScope[ContentEncodingScope[\"Block\"] = 1] = \"Block\";\n ContentEncodingScope[ContentEncodingScope[\"Private\"] = 2] = \"Private\";\n ContentEncodingScope[ContentEncodingScope[\"Next\"] = 4] = \"Next\";\n})(ContentEncodingScope || (ContentEncodingScope = {}));\nvar ContentCompAlgo;\n(function (ContentCompAlgo) {\n ContentCompAlgo[ContentCompAlgo[\"Zlib\"] = 0] = \"Zlib\";\n ContentCompAlgo[ContentCompAlgo[\"Bzlib\"] = 1] = \"Bzlib\";\n ContentCompAlgo[ContentCompAlgo[\"lzo1x\"] = 2] = \"lzo1x\";\n ContentCompAlgo[ContentCompAlgo[\"HeaderStripping\"] = 3] = \"HeaderStripping\";\n})(ContentCompAlgo || (ContentCompAlgo = {}));\nconst METADATA_ELEMENTS = [\n { id: EBMLId.SeekHead, flag: 'seekHeadSeen' },\n { id: EBMLId.Info, flag: 'infoSeen' },\n { id: EBMLId.Tracks, flag: 'tracksSeen' },\n { id: EBMLId.Cues, flag: 'cuesSeen' },\n];\nconst MAX_RESYNC_LENGTH = 10 * 2 ** 20; // 10 MiB\nexport class MatroskaDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.readMetadataPromise = null;\n this.segments = [];\n this.currentSegment = null;\n this.currentTrack = null;\n this.currentCluster = null;\n this.currentBlock = null;\n this.currentBlockAdditional = null;\n this.currentCueTime = null;\n this.currentDecodingInstruction = null;\n this.currentTagTargetIsMovie = true;\n this.currentSimpleTagName = null;\n this.currentAttachedFile = null;\n this.isWebM = false;\n this.reader = input._reader;\n }\n async computeDuration() {\n const tracks = await this.getTracks();\n const 
trackDurations = await Promise.all(tracks.map(x => x.computeDuration()));\n return Math.max(0, ...trackDurations);\n }\n async getTracks() {\n await this.readMetadata();\n return this.segments.flatMap(segment => segment.tracks.map(track => track.inputTrack));\n }\n async getMimeType() {\n await this.readMetadata();\n const tracks = await this.getTracks();\n const codecStrings = await Promise.all(tracks.map(x => x.getCodecParameterString()));\n return buildMatroskaMimeType({\n isWebM: this.isWebM,\n hasVideo: this.segments.some(segment => segment.tracks.some(x => x.info?.type === 'video')),\n hasAudio: this.segments.some(segment => segment.tracks.some(x => x.info?.type === 'audio')),\n codecStrings: codecStrings.filter(Boolean),\n });\n }\n async getMetadataTags() {\n await this.readMetadata();\n // Load metadata tags from each segment lazily (only once)\n for (const segment of this.segments) {\n if (!segment.metadataTagsCollected) {\n if (this.reader.fileSize !== null) {\n await this.loadSegmentMetadata(segment);\n }\n else {\n // The seeking would be too crazy, let's not\n }\n segment.metadataTagsCollected = true;\n }\n }\n // This is kinda handwavy, and how we handle multiple segments isn't suuuuper well-defined anyway; so we just\n // shallow-merge metadata tags from all (usually just one) segments.\n let metadataTags = {};\n for (const segment of this.segments) {\n metadataTags = { ...metadataTags, ...segment.metadataTags };\n }\n return metadataTags;\n }\n readMetadata() {\n return this.readMetadataPromise ??= (async () => {\n let currentPos = 0;\n // Loop over all top-level elements in the file\n while (true) {\n let slice = this.reader.requestSliceRange(currentPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const header = readElementHeader(slice);\n if (!header) {\n break; // Zero padding at the end of the file triggers this, for example\n }\n const id = header.id;\n let size = 
header.size;\n const dataStartPos = slice.filePos;\n if (id === EBMLId.EBML) {\n assertDefinedSize(size);\n let slice = this.reader.requestSlice(dataStartPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n this.readContiguousElements(slice);\n }\n else if (id === EBMLId.Segment) { // Segment found!\n await this.readSegment(dataStartPos, size);\n if (size === undefined) {\n // Segment sizes can be undefined (common in livestreamed files), so assume this is the last\n // and only segment\n break;\n }\n if (this.reader.fileSize === null) {\n break; // Stop at the first segment\n }\n }\n else if (id === EBMLId.Cluster) {\n if (this.reader.fileSize === null) {\n break; // Shouldn't be reached anyway, since we stop at the first segment\n }\n // Clusters are not a top-level element in Matroska, but some files contain a Segment whose size\n // doesn't contain any of the clusters that follow it. In the case, we apply the following logic: if\n // we find a top-level cluster, attribute it to the previous segment.\n if (size === undefined) {\n // Just in case this is one of those weird sizeless clusters, let's do our best and still try to\n // determine its size.\n const nextElementPos = await searchForNextElementId(this.reader, dataStartPos, LEVEL_0_AND_1_EBML_IDS, this.reader.fileSize);\n size = nextElementPos.pos - dataStartPos;\n }\n const lastSegment = last(this.segments);\n if (lastSegment) {\n // Extend the previous segment's size\n lastSegment.elementEndPos = dataStartPos + size;\n }\n }\n assertDefinedSize(size);\n currentPos = dataStartPos + size;\n }\n })();\n }\n async readSegment(segmentDataStart, dataSize) {\n this.currentSegment = {\n seekHeadSeen: false,\n infoSeen: false,\n tracksSeen: false,\n cuesSeen: false,\n tagsSeen: false,\n attachmentsSeen: false,\n timestampScale: -1,\n timestampFactor: -1,\n duration: -1,\n seekEntries: [],\n tracks: [],\n cuePoints: [],\n dataStartPos: segmentDataStart,\n elementEndPos: 
dataSize === undefined\n ? null // Assume it goes until the end of the file\n : segmentDataStart + dataSize,\n clusterSeekStartPos: segmentDataStart,\n lastReadCluster: null,\n metadataTags: {},\n metadataTagsCollected: false,\n };\n this.segments.push(this.currentSegment);\n let currentPos = segmentDataStart;\n while (this.currentSegment.elementEndPos === null || currentPos < this.currentSegment.elementEndPos) {\n let slice = this.reader.requestSliceRange(currentPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const elementStartPos = currentPos;\n const header = readElementHeader(slice);\n if (!header || (!LEVEL_1_EBML_IDS.includes(header.id) && header.id !== EBMLId.Void)) {\n // Potential junk. Let's try to resync\n const nextPos = await resync(this.reader, elementStartPos, LEVEL_1_EBML_IDS, Math.min(this.currentSegment.elementEndPos ?? Infinity, elementStartPos + MAX_RESYNC_LENGTH));\n if (nextPos) {\n currentPos = nextPos;\n continue;\n }\n else {\n break; // Resync failed\n }\n }\n const { id, size } = header;\n const dataStartPos = slice.filePos;\n const metadataElementIndex = METADATA_ELEMENTS.findIndex(x => x.id === id);\n if (metadataElementIndex !== -1) {\n const field = METADATA_ELEMENTS[metadataElementIndex].flag;\n this.currentSegment[field] = true;\n assertDefinedSize(size);\n let slice = this.reader.requestSlice(dataStartPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (slice) {\n this.readContiguousElements(slice);\n }\n }\n else if (id === EBMLId.Tags || id === EBMLId.Attachments) {\n // Metadata found at the beginning of the segment, great, let's parse it\n if (id === EBMLId.Tags) {\n this.currentSegment.tagsSeen = true;\n }\n else {\n this.currentSegment.attachmentsSeen = true;\n }\n assertDefinedSize(size);\n let slice = this.reader.requestSlice(dataStartPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (slice) {\n 
this.readContiguousElements(slice);\n }\n }\n else if (id === EBMLId.Cluster) {\n this.currentSegment.clusterSeekStartPos = elementStartPos;\n break; // Stop at the first cluster\n }\n if (size === undefined) {\n break;\n }\n else {\n currentPos = dataStartPos + size;\n }\n }\n // Sort the seek entries by file position so reading them exhibits a sequential pattern\n this.currentSegment.seekEntries.sort((a, b) => a.segmentPosition - b.segmentPosition);\n if (this.reader.fileSize !== null) {\n // Use the seek head to read missing metadata elements\n for (const seekEntry of this.currentSegment.seekEntries) {\n const target = METADATA_ELEMENTS.find(x => x.id === seekEntry.id);\n if (!target) {\n continue;\n }\n if (this.currentSegment[target.flag])\n continue;\n let slice = this.reader.requestSliceRange(segmentDataStart + seekEntry.segmentPosition, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n continue;\n const header = readElementHeader(slice);\n if (!header)\n continue;\n const { id, size } = header;\n if (id !== target.id)\n continue;\n assertDefinedSize(size);\n this.currentSegment[target.flag] = true;\n let dataSlice = this.reader.requestSlice(slice.filePos, size);\n if (dataSlice instanceof Promise)\n dataSlice = await dataSlice;\n if (!dataSlice)\n continue;\n this.readContiguousElements(dataSlice);\n }\n }\n if (this.currentSegment.timestampScale === -1) {\n // TimestampScale element is missing. 
Technically an invalid file, but let's default to the typical value,\n // which is 1e6.\n this.currentSegment.timestampScale = 1e6;\n this.currentSegment.timestampFactor = 1e9 / 1e6;\n }\n // Compute default duration for all tracks now that we have the timestamp factor\n for (const track of this.currentSegment.tracks) {\n if (track.defaultDurationNs !== null) {\n track.defaultDuration = (this.currentSegment.timestampFactor * track.defaultDurationNs) / 1e9;\n }\n }\n // Put default tracks first\n this.currentSegment.tracks.sort((a, b) => Number(b.disposition.default) - Number(a.disposition.default));\n // Now, let's distribute the cue points to the tracks\n const idToTrack = new Map(this.currentSegment.tracks.map(x => [x.id, x]));\n // Assign cue points to their respective tracks\n for (const cuePoint of this.currentSegment.cuePoints) {\n const track = idToTrack.get(cuePoint.trackId);\n if (track) {\n track.cuePoints.push(cuePoint);\n }\n }\n for (const track of this.currentSegment.tracks) {\n // Sort cue points by time\n track.cuePoints.sort((a, b) => a.time - b.time);\n // Remove multiple cue points for the same time\n for (let i = 0; i < track.cuePoints.length - 1; i++) {\n const cuePoint1 = track.cuePoints[i];\n const cuePoint2 = track.cuePoints[i + 1];\n if (cuePoint1.time === cuePoint2.time) {\n track.cuePoints.splice(i + 1, 1);\n i--;\n }\n }\n }\n let trackWithMostCuePoints = null;\n let maxCuePointCount = -Infinity;\n for (const track of this.currentSegment.tracks) {\n if (track.cuePoints.length > maxCuePointCount) {\n maxCuePointCount = track.cuePoints.length;\n trackWithMostCuePoints = track;\n }\n }\n // For every track that has received 0 cue points (can happen, often only the video track receives cue points),\n // we still want to have better seeking. 
Therefore, let's give it the cue points of the track with the most cue\n // points, which should provide us with the most fine-grained seeking.\n for (const track of this.currentSegment.tracks) {\n if (track.cuePoints.length === 0) {\n track.cuePoints = trackWithMostCuePoints.cuePoints;\n }\n }\n this.currentSegment = null;\n }\n async readCluster(startPos, segment) {\n if (segment.lastReadCluster?.elementStartPos === startPos) {\n return segment.lastReadCluster;\n }\n let headerSlice = this.reader.requestSliceRange(startPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n assert(headerSlice);\n const elementStartPos = startPos;\n const elementHeader = readElementHeader(headerSlice);\n assert(elementHeader);\n const id = elementHeader.id;\n assert(id === EBMLId.Cluster);\n let size = elementHeader.size;\n const dataStartPos = headerSlice.filePos;\n if (size === undefined) {\n // The cluster's size is undefined (can happen in livestreamed files). We'd still like to know the size of\n // it, so we have no other choice but to iterate over the EBML structure until we find an element at level\n // 0 or 1, indicating the end of the cluster (all elements inside the cluster are at level 2).\n const nextElementPos = await searchForNextElementId(this.reader, dataStartPos, LEVEL_0_AND_1_EBML_IDS, segment.elementEndPos);\n size = nextElementPos.pos - dataStartPos;\n }\n // Load the entire cluster\n let dataSlice = this.reader.requestSlice(dataStartPos, size);\n if (dataSlice instanceof Promise)\n dataSlice = await dataSlice;\n const cluster = {\n segment,\n elementStartPos,\n elementEndPos: dataStartPos + size,\n dataStartPos,\n timestamp: -1,\n trackData: new Map(),\n };\n this.currentCluster = cluster;\n if (dataSlice) {\n // Read the children of the cluster, stopping early at level 0 or 1 EBML elements. 
We do this because some\n // clusters have incorrect sizes that are too large\n const endPos = this.readContiguousElements(dataSlice, LEVEL_0_AND_1_EBML_IDS);\n cluster.elementEndPos = endPos;\n }\n for (const [, trackData] of cluster.trackData) {\n const track = trackData.track;\n // This must hold, as track datas only get created if a block for that track is encountered\n assert(trackData.blocks.length > 0);\n let hasLacedBlocks = false;\n for (let i = 0; i < trackData.blocks.length; i++) {\n const block = trackData.blocks[i];\n block.timestamp += cluster.timestamp;\n hasLacedBlocks ||= block.lacing !== BlockLacing.None;\n }\n trackData.presentationTimestamps = trackData.blocks\n .map((block, i) => ({ timestamp: block.timestamp, blockIndex: i }))\n .sort((a, b) => a.timestamp - b.timestamp);\n for (let i = 0; i < trackData.presentationTimestamps.length; i++) {\n const currentEntry = trackData.presentationTimestamps[i];\n const currentBlock = trackData.blocks[currentEntry.blockIndex];\n if (trackData.firstKeyFrameTimestamp === null && currentBlock.isKeyFrame) {\n trackData.firstKeyFrameTimestamp = currentBlock.timestamp;\n }\n if (i < trackData.presentationTimestamps.length - 1) {\n // Update block durations based on presentation order\n const nextEntry = trackData.presentationTimestamps[i + 1];\n currentBlock.duration = nextEntry.timestamp - currentBlock.timestamp;\n }\n else if (currentBlock.duration === 0) {\n if (track.defaultDuration != null) {\n if (currentBlock.lacing === BlockLacing.None) {\n currentBlock.duration = track.defaultDuration;\n }\n else {\n // Handled by the lace resolution code\n }\n }\n }\n }\n if (hasLacedBlocks) {\n // Perform lace resolution. Here, we expand each laced block into multiple blocks where each contains\n // one frame of the lace. 
We do this after determining block timestamps so we can properly distribute\n // the block's duration across the laced frames.\n this.expandLacedBlocks(trackData.blocks, track);\n // Recompute since blocks have changed\n trackData.presentationTimestamps = trackData.blocks\n .map((block, i) => ({ timestamp: block.timestamp, blockIndex: i }))\n .sort((a, b) => a.timestamp - b.timestamp);\n }\n const firstBlock = trackData.blocks[trackData.presentationTimestamps[0].blockIndex];\n const lastBlock = trackData.blocks[last(trackData.presentationTimestamps).blockIndex];\n trackData.startTimestamp = firstBlock.timestamp;\n trackData.endTimestamp = lastBlock.timestamp + lastBlock.duration;\n // Let's remember that a cluster with a given timestamp is here, speeding up future lookups if no cues exist\n const insertionIndex = binarySearchLessOrEqual(track.clusterPositionCache, trackData.startTimestamp, x => x.startTimestamp);\n if (insertionIndex === -1\n || track.clusterPositionCache[insertionIndex].elementStartPos !== elementStartPos) {\n track.clusterPositionCache.splice(insertionIndex + 1, 0, {\n elementStartPos: cluster.elementStartPos,\n startTimestamp: trackData.startTimestamp,\n });\n }\n }\n segment.lastReadCluster = cluster;\n return cluster;\n }\n getTrackDataInCluster(cluster, trackNumber) {\n let trackData = cluster.trackData.get(trackNumber);\n if (!trackData) {\n const track = cluster.segment.tracks.find(x => x.id === trackNumber);\n if (!track) {\n return null;\n }\n trackData = {\n track,\n startTimestamp: 0,\n endTimestamp: 0,\n firstKeyFrameTimestamp: null,\n blocks: [],\n presentationTimestamps: [],\n };\n cluster.trackData.set(trackNumber, trackData);\n }\n return trackData;\n }\n expandLacedBlocks(blocks, track) {\n // https://www.matroska.org/technical/notes.html#block-lacing\n for (let blockIndex = 0; blockIndex < blocks.length; blockIndex++) {\n const originalBlock = blocks[blockIndex];\n if (originalBlock.lacing === BlockLacing.None) {\n continue;\n 
}\n // Decode the block data if it hasn't been decoded yet (needed for lacing expansion)\n if (!originalBlock.decoded) {\n originalBlock.data = this.decodeBlockData(track, originalBlock.data);\n originalBlock.decoded = true;\n }\n const slice = FileSlice.tempFromBytes(originalBlock.data);\n const frameSizes = [];\n const frameCount = readU8(slice) + 1;\n switch (originalBlock.lacing) {\n case BlockLacing.Xiph:\n {\n let totalUsedSize = 0;\n // Xiph lacing, just like in Ogg\n for (let i = 0; i < frameCount - 1; i++) {\n let frameSize = 0;\n while (slice.bufferPos < slice.length) {\n const value = readU8(slice);\n frameSize += value;\n if (value < 255) {\n frameSizes.push(frameSize);\n totalUsedSize += frameSize;\n break;\n }\n }\n }\n // Compute the last frame's size from whatever's left\n frameSizes.push(slice.length - (slice.bufferPos + totalUsedSize));\n }\n ;\n break;\n case BlockLacing.FixedSize:\n {\n // Fixed size lacing: all frames have same size\n const totalDataSize = slice.length - 1; // Minus the frame count byte\n const frameSize = Math.floor(totalDataSize / frameCount);\n for (let i = 0; i < frameCount; i++) {\n frameSizes.push(frameSize);\n }\n }\n ;\n break;\n case BlockLacing.Ebml:\n {\n // EBML lacing: first size absolute, subsequent ones are coded as signed differences from the last\n const firstResult = readVarInt(slice);\n assert(firstResult !== null); // Assume it's not an invalid VINT\n let currentSize = firstResult;\n frameSizes.push(currentSize);\n let totalUsedSize = currentSize;\n for (let i = 1; i < frameCount - 1; i++) {\n const startPos = slice.bufferPos;\n const diffResult = readVarInt(slice);\n assert(diffResult !== null);\n const unsignedDiff = diffResult;\n const width = slice.bufferPos - startPos;\n const bias = (1 << (width * 7 - 1)) - 1; // Typo-corrected version of 2^((7*n)-1)^-1\n const diff = unsignedDiff - bias;\n currentSize += diff;\n frameSizes.push(currentSize);\n totalUsedSize += currentSize;\n }\n // Compute the last 
frame's size from whatever's left\n frameSizes.push(slice.length - (slice.bufferPos + totalUsedSize));\n }\n ;\n break;\n default: assert(false);\n }\n assert(frameSizes.length === frameCount);\n blocks.splice(blockIndex, 1); // Remove the original block\n const blockDuration = originalBlock.duration || frameCount * (track.defaultDuration ?? 0);\n // Now, let's insert each frame as its own block\n for (let i = 0; i < frameCount; i++) {\n const frameSize = frameSizes[i];\n const frameData = readBytes(slice, frameSize);\n // Distribute timestamps evenly across the block duration\n const frameTimestamp = originalBlock.timestamp + (blockDuration * i / frameCount);\n const frameDuration = blockDuration / frameCount;\n blocks.splice(blockIndex + i, 0, {\n timestamp: frameTimestamp,\n duration: frameDuration,\n isKeyFrame: originalBlock.isKeyFrame,\n data: frameData,\n lacing: BlockLacing.None,\n decoded: true,\n mainAdditional: originalBlock.mainAdditional,\n });\n }\n blockIndex += frameCount; // Skip the blocks we just added\n blockIndex--;\n }\n }\n async loadSegmentMetadata(segment) {\n for (const seekEntry of segment.seekEntries) {\n if (seekEntry.id === EBMLId.Tags && !segment.tagsSeen) {\n // We need to load the tags\n }\n else if (seekEntry.id === EBMLId.Attachments && !segment.attachmentsSeen) {\n // We need to load the attachments\n }\n else {\n continue;\n }\n let slice = this.reader.requestSliceRange(segment.dataStartPos + seekEntry.segmentPosition, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n continue;\n const header = readElementHeader(slice);\n if (!header || header.id !== seekEntry.id)\n continue;\n const { size } = header;\n assertDefinedSize(size);\n assert(!this.currentSegment);\n this.currentSegment = segment;\n let dataSlice = this.reader.requestSlice(slice.filePos, size);\n if (dataSlice instanceof Promise)\n dataSlice = await dataSlice;\n if (dataSlice) {\n 
this.readContiguousElements(dataSlice);\n }\n this.currentSegment = null;\n // Mark as seen\n if (seekEntry.id === EBMLId.Tags) {\n segment.tagsSeen = true;\n }\n else if (seekEntry.id === EBMLId.Attachments) {\n segment.attachmentsSeen = true;\n }\n }\n }\n readContiguousElements(slice, stopIds) {\n while (slice.remainingLength >= MIN_HEADER_SIZE) {\n const startPos = slice.filePos;\n const foundElement = this.traverseElement(slice, stopIds);\n if (!foundElement) {\n return startPos;\n }\n }\n return slice.filePos;\n }\n traverseElement(slice, stopIds) {\n const header = readElementHeader(slice);\n if (!header) {\n return false;\n }\n if (stopIds && stopIds.includes(header.id)) {\n return false;\n }\n const { id, size } = header;\n const dataStartPos = slice.filePos;\n assertDefinedSize(size);\n switch (id) {\n case EBMLId.DocType:\n {\n this.isWebM = readAsciiString(slice, size) === 'webm';\n }\n ;\n break;\n case EBMLId.Seek:\n {\n if (!this.currentSegment)\n break;\n const seekEntry = { id: -1, segmentPosition: -1 };\n this.currentSegment.seekEntries.push(seekEntry);\n this.readContiguousElements(slice.slice(dataStartPos, size));\n if (seekEntry.id === -1 || seekEntry.segmentPosition === -1) {\n this.currentSegment.seekEntries.pop();\n }\n }\n ;\n break;\n case EBMLId.SeekID:\n {\n const lastSeekEntry = this.currentSegment?.seekEntries[this.currentSegment.seekEntries.length - 1];\n if (!lastSeekEntry)\n break;\n lastSeekEntry.id = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.SeekPosition:\n {\n const lastSeekEntry = this.currentSegment?.seekEntries[this.currentSegment.seekEntries.length - 1];\n if (!lastSeekEntry)\n break;\n lastSeekEntry.segmentPosition = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.TimestampScale:\n {\n if (!this.currentSegment)\n break;\n this.currentSegment.timestampScale = readUnsignedInt(slice, size);\n this.currentSegment.timestampFactor = 1e9 / this.currentSegment.timestampScale;\n }\n ;\n break;\n case 
EBMLId.Duration:\n {\n if (!this.currentSegment)\n break;\n this.currentSegment.duration = readFloat(slice, size);\n }\n ;\n break;\n case EBMLId.TrackEntry:\n {\n if (!this.currentSegment)\n break;\n this.currentTrack = {\n id: -1,\n segment: this.currentSegment,\n demuxer: this,\n clusterPositionCache: [],\n cuePoints: [],\n disposition: {\n ...DEFAULT_TRACK_DISPOSITION,\n },\n inputTrack: null,\n codecId: null,\n codecPrivate: null,\n defaultDuration: null,\n defaultDurationNs: null,\n name: null,\n languageCode: UNDETERMINED_LANGUAGE,\n decodingInstructions: [],\n info: null,\n };\n this.readContiguousElements(slice.slice(dataStartPos, size));\n // Check if track was disabled during parsing (e.g., by FlagEnabled being 0)\n if (!this.currentTrack) {\n break;\n }\n if (this.currentTrack.decodingInstructions.some((instruction) => {\n return instruction.data?.type !== 'decompress'\n || instruction.scope !== ContentEncodingScope.Block\n || instruction.data.algorithm !== ContentCompAlgo.HeaderStripping;\n })) {\n console.warn(`Track #${this.currentTrack.id} has an unsupported content encoding; dropping.`);\n this.currentTrack = null;\n }\n if (this.currentTrack\n && this.currentTrack.id !== -1\n && this.currentTrack.codecId\n && this.currentTrack.info) {\n const slashIndex = this.currentTrack.codecId.indexOf('/');\n const codecIdWithoutSuffix = slashIndex === -1\n ? 
this.currentTrack.codecId\n : this.currentTrack.codecId.slice(0, slashIndex);\n if (this.currentTrack.info.type === 'video'\n && this.currentTrack.info.width !== -1\n && this.currentTrack.info.height !== -1) {\n this.currentTrack.info.squarePixelWidth = this.currentTrack.info.width;\n this.currentTrack.info.squarePixelHeight = this.currentTrack.info.height;\n if (this.currentTrack.info.displayWidth !== null\n && this.currentTrack.info.displayHeight !== null) {\n const num = this.currentTrack.info.displayWidth * this.currentTrack.info.height;\n const den = this.currentTrack.info.displayHeight * this.currentTrack.info.width;\n if (num > den) {\n this.currentTrack.info.squarePixelWidth = Math.round(this.currentTrack.info.width * num / den);\n }\n else {\n this.currentTrack.info.squarePixelHeight = Math.round(this.currentTrack.info.height * den / num);\n }\n }\n if (this.currentTrack.codecId === CODEC_STRING_MAP.avc) {\n this.currentTrack.info.codec = 'avc';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (this.currentTrack.codecId === CODEC_STRING_MAP.hevc) {\n this.currentTrack.info.codec = 'hevc';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.vp8) {\n this.currentTrack.info.codec = 'vp8';\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.vp9) {\n this.currentTrack.info.codec = 'vp9';\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.av1) {\n this.currentTrack.info.codec = 'av1';\n }\n const videoTrack = this.currentTrack;\n const inputTrack = new InputVideoTrack(this.input, new MatroskaVideoTrackBacking(videoTrack));\n this.currentTrack.inputTrack = inputTrack;\n this.currentSegment.tracks.push(this.currentTrack);\n }\n else if (this.currentTrack.info.type === 'audio'\n && this.currentTrack.info.numberOfChannels !== -1\n && this.currentTrack.info.sampleRate !== -1) {\n if (codecIdWithoutSuffix === CODEC_STRING_MAP.aac) {\n 
this.currentTrack.info.codec = 'aac';\n this.currentTrack.info.aacCodecInfo = {\n isMpeg2: this.currentTrack.codecId.includes('MPEG2'),\n objectType: null,\n };\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (this.currentTrack.codecId === CODEC_STRING_MAP.mp3) {\n this.currentTrack.info.codec = 'mp3';\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.opus) {\n this.currentTrack.info.codec = 'opus';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n this.currentTrack.info.sampleRate = OPUS_SAMPLE_RATE; // Always the same\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.vorbis) {\n this.currentTrack.info.codec = 'vorbis';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.flac) {\n this.currentTrack.info.codec = 'flac';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.ac3) {\n this.currentTrack.info.codec = 'ac3';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (codecIdWithoutSuffix === CODEC_STRING_MAP.eac3) {\n this.currentTrack.info.codec = 'eac3';\n this.currentTrack.info.codecDescription = this.currentTrack.codecPrivate;\n }\n else if (this.currentTrack.codecId === 'A_PCM/INT/LIT') {\n if (this.currentTrack.info.bitDepth === 8) {\n this.currentTrack.info.codec = 'pcm-u8';\n }\n else if (this.currentTrack.info.bitDepth === 16) {\n this.currentTrack.info.codec = 'pcm-s16';\n }\n else if (this.currentTrack.info.bitDepth === 24) {\n this.currentTrack.info.codec = 'pcm-s24';\n }\n else if (this.currentTrack.info.bitDepth === 32) {\n this.currentTrack.info.codec = 'pcm-s32';\n }\n }\n else if (this.currentTrack.codecId === 'A_PCM/INT/BIG') {\n if (this.currentTrack.info.bitDepth === 8) {\n this.currentTrack.info.codec = 'pcm-u8';\n }\n else if (this.currentTrack.info.bitDepth === 
16) {\n this.currentTrack.info.codec = 'pcm-s16be';\n }\n else if (this.currentTrack.info.bitDepth === 24) {\n this.currentTrack.info.codec = 'pcm-s24be';\n }\n else if (this.currentTrack.info.bitDepth === 32) {\n this.currentTrack.info.codec = 'pcm-s32be';\n }\n }\n else if (this.currentTrack.codecId === 'A_PCM/FLOAT/IEEE') {\n if (this.currentTrack.info.bitDepth === 32) {\n this.currentTrack.info.codec = 'pcm-f32';\n }\n else if (this.currentTrack.info.bitDepth === 64) {\n this.currentTrack.info.codec = 'pcm-f64';\n }\n }\n const audioTrack = this.currentTrack;\n const inputTrack = new InputAudioTrack(this.input, new MatroskaAudioTrackBacking(audioTrack));\n this.currentTrack.inputTrack = inputTrack;\n this.currentSegment.tracks.push(this.currentTrack);\n }\n }\n this.currentTrack = null;\n }\n ;\n break;\n case EBMLId.TrackNumber:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.id = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.TrackType:\n {\n if (!this.currentTrack)\n break;\n const type = readUnsignedInt(slice, size);\n if (type === 1) {\n this.currentTrack.info = {\n type: 'video',\n width: -1,\n height: -1,\n displayWidth: null,\n displayHeight: null,\n displayUnit: null,\n squarePixelWidth: -1,\n squarePixelHeight: -1,\n rotation: 0,\n codec: null,\n codecDescription: null,\n colorSpace: null,\n alphaMode: false,\n };\n }\n else if (type === 2) {\n this.currentTrack.info = {\n type: 'audio',\n numberOfChannels: -1,\n sampleRate: -1,\n bitDepth: -1,\n codec: null,\n codecDescription: null,\n aacCodecInfo: null,\n };\n }\n }\n ;\n break;\n case EBMLId.FlagEnabled:\n {\n if (!this.currentTrack)\n break;\n const enabled = readUnsignedInt(slice, size);\n if (!enabled) {\n this.currentTrack = null;\n }\n }\n ;\n break;\n case EBMLId.FlagDefault:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.disposition.default = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.FlagForced:\n {\n if (!this.currentTrack)\n 
break;\n this.currentTrack.disposition.forced = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.FlagOriginal:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.disposition.original = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.FlagHearingImpaired:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.disposition.hearingImpaired = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.FlagVisualImpaired:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.disposition.visuallyImpaired = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.FlagCommentary:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.disposition.commentary = !!readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.CodecID:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.codecId = readAsciiString(slice, size);\n }\n ;\n break;\n case EBMLId.CodecPrivate:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.codecPrivate = readBytes(slice, size);\n }\n ;\n break;\n case EBMLId.DefaultDuration:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.defaultDurationNs = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.Name:\n {\n if (!this.currentTrack)\n break;\n this.currentTrack.name = readUnicodeString(slice, size);\n }\n ;\n break;\n case EBMLId.Language:\n {\n if (!this.currentTrack)\n break;\n if (this.currentTrack.languageCode !== UNDETERMINED_LANGUAGE) {\n // LanguageBCP47 was present, which takes precedence\n break;\n }\n this.currentTrack.languageCode = readAsciiString(slice, size);\n if (!isIso639Dash2LanguageCode(this.currentTrack.languageCode)) {\n this.currentTrack.languageCode = UNDETERMINED_LANGUAGE;\n }\n }\n ;\n break;\n case EBMLId.LanguageBCP47:\n {\n if (!this.currentTrack)\n break;\n const bcp47 = readAsciiString(slice, size);\n const languageSubtag = bcp47.split('-')[0];\n if (languageSubtag) {\n // Technically invalid, for now: The language subtag might be a 
language code from ISO 639-1,\n // ISO 639-2, ISO 639-3, ISO 639-5 or some other thing (source: Wikipedia). But, `languageCode` is\n // documented as ISO 639-2. Changing the definition would be a breaking change. This will get\n // cleaned up in the future by defining languageCode to be BCP 47 instead.\n this.currentTrack.languageCode = languageSubtag;\n }\n else {\n this.currentTrack.languageCode = UNDETERMINED_LANGUAGE;\n }\n }\n ;\n break;\n case EBMLId.Video:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.PixelWidth:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.width = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.PixelHeight:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.height = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.DisplayWidth:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.displayWidth = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.DisplayHeight:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.displayHeight = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.DisplayUnit:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.displayUnit = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.AlphaMode:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.alphaMode = readUnsignedInt(slice, size) === 1;\n }\n ;\n break;\n case EBMLId.Colour:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.currentTrack.info.colorSpace = {};\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.MatrixCoefficients:\n {\n if (this.currentTrack?.info?.type !== 'video' || !this.currentTrack.info.colorSpace)\n break;\n 
const matrixCoefficients = readUnsignedInt(slice, size);\n const mapped = MATRIX_COEFFICIENTS_MAP_INVERSE[matrixCoefficients] ?? null;\n this.currentTrack.info.colorSpace.matrix = mapped;\n }\n ;\n break;\n case EBMLId.Range:\n {\n if (this.currentTrack?.info?.type !== 'video' || !this.currentTrack.info.colorSpace)\n break;\n this.currentTrack.info.colorSpace.fullRange = readUnsignedInt(slice, size) === 2;\n }\n ;\n break;\n case EBMLId.TransferCharacteristics:\n {\n if (this.currentTrack?.info?.type !== 'video' || !this.currentTrack.info.colorSpace)\n break;\n const transferCharacteristics = readUnsignedInt(slice, size);\n const mapped = TRANSFER_CHARACTERISTICS_MAP_INVERSE[transferCharacteristics] ?? null;\n this.currentTrack.info.colorSpace.transfer = mapped;\n }\n ;\n break;\n case EBMLId.Primaries:\n {\n if (this.currentTrack?.info?.type !== 'video' || !this.currentTrack.info.colorSpace)\n break;\n const primaries = readUnsignedInt(slice, size);\n const mapped = COLOR_PRIMARIES_MAP_INVERSE[primaries] ?? 
null;\n this.currentTrack.info.colorSpace.primaries = mapped;\n }\n ;\n break;\n case EBMLId.Projection:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.ProjectionPoseRoll:\n {\n if (this.currentTrack?.info?.type !== 'video')\n break;\n const rotation = readFloat(slice, size);\n const flippedRotation = -rotation; // Convert counter-clockwise to clockwise\n try {\n this.currentTrack.info.rotation = normalizeRotation(flippedRotation);\n }\n catch {\n // It wasn't a valid rotation\n }\n }\n ;\n break;\n case EBMLId.Audio:\n {\n if (this.currentTrack?.info?.type !== 'audio')\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.SamplingFrequency:\n {\n if (this.currentTrack?.info?.type !== 'audio')\n break;\n this.currentTrack.info.sampleRate = readFloat(slice, size);\n }\n ;\n break;\n case EBMLId.Channels:\n {\n if (this.currentTrack?.info?.type !== 'audio')\n break;\n this.currentTrack.info.numberOfChannels = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.BitDepth:\n {\n if (this.currentTrack?.info?.type !== 'audio')\n break;\n this.currentTrack.info.bitDepth = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.CuePoint:\n {\n if (!this.currentSegment)\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n this.currentCueTime = null;\n }\n ;\n break;\n case EBMLId.CueTime:\n {\n this.currentCueTime = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.CueTrackPositions:\n {\n if (this.currentCueTime === null)\n break;\n assert(this.currentSegment);\n const cuePoint = { time: this.currentCueTime, trackId: -1, clusterPosition: -1 };\n this.currentSegment.cuePoints.push(cuePoint);\n this.readContiguousElements(slice.slice(dataStartPos, size));\n if (cuePoint.trackId === -1 || cuePoint.clusterPosition === -1) {\n this.currentSegment.cuePoints.pop();\n }\n }\n 
;\n break;\n case EBMLId.CueTrack:\n {\n const lastCuePoint = this.currentSegment?.cuePoints[this.currentSegment.cuePoints.length - 1];\n if (!lastCuePoint)\n break;\n lastCuePoint.trackId = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.CueClusterPosition:\n {\n const lastCuePoint = this.currentSegment?.cuePoints[this.currentSegment.cuePoints.length - 1];\n if (!lastCuePoint)\n break;\n assert(this.currentSegment);\n lastCuePoint.clusterPosition = this.currentSegment.dataStartPos + readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.Timestamp:\n {\n if (!this.currentCluster)\n break;\n this.currentCluster.timestamp = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.SimpleBlock:\n {\n if (!this.currentCluster)\n break;\n const trackNumber = readVarInt(slice);\n if (trackNumber === null)\n break;\n const trackData = this.getTrackDataInCluster(this.currentCluster, trackNumber);\n if (!trackData)\n break; // Not a track we care about\n const relativeTimestamp = readI16Be(slice);\n const flags = readU8(slice);\n const lacing = (flags >> 1) & 0x3; // If the block is laced, we'll expand it later\n let isKeyFrame = !!(flags & 0x80);\n if (trackData.track.info?.type === 'audio' && trackData.track.info.codec) {\n // Some files don't mark their audio packets as key packets (I'm looking at you, Firefox). 
But, we\n // can fix this in most cases: if we recognize the codec of the track, then we know every packet is\n // necessarily a key packet, no matter what the container says.\n // https://github.com/Vanilagy/mediabunny/issues/192\n isKeyFrame = true;\n }\n const blockData = readBytes(slice, size - (slice.filePos - dataStartPos));\n const hasDecodingInstructions = trackData.track.decodingInstructions.length > 0;\n trackData.blocks.push({\n timestamp: relativeTimestamp, // We'll add the cluster's timestamp to this later\n duration: 0, // Will set later\n isKeyFrame,\n data: blockData,\n lacing,\n decoded: !hasDecodingInstructions,\n mainAdditional: null,\n });\n }\n ;\n break;\n case EBMLId.BlockGroup:\n {\n if (!this.currentCluster)\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n this.currentBlock = null;\n }\n ;\n break;\n case EBMLId.Block:\n {\n if (!this.currentCluster)\n break;\n const trackNumber = readVarInt(slice);\n if (trackNumber === null)\n break;\n const trackData = this.getTrackDataInCluster(this.currentCluster, trackNumber);\n if (!trackData)\n break;\n const relativeTimestamp = readI16Be(slice);\n const flags = readU8(slice);\n const lacing = (flags >> 1) & 0x3; // If the block is laced, we'll expand it later\n const blockData = readBytes(slice, size - (slice.filePos - dataStartPos));\n const hasDecodingInstructions = trackData.track.decodingInstructions.length > 0;\n this.currentBlock = {\n timestamp: relativeTimestamp, // We'll add the cluster's timestamp to this later\n duration: 0, // Will set later\n isKeyFrame: true,\n data: blockData,\n lacing,\n decoded: !hasDecodingInstructions,\n mainAdditional: null,\n };\n trackData.blocks.push(this.currentBlock);\n }\n ;\n break;\n case EBMLId.BlockAdditions:\n {\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.BlockMore:\n {\n if (!this.currentBlock)\n break;\n this.currentBlockAdditional = {\n addId: 1,\n data: null,\n };\n 
this.readContiguousElements(slice.slice(dataStartPos, size));\n if (this.currentBlockAdditional.data && this.currentBlockAdditional.addId === 1) {\n this.currentBlock.mainAdditional = this.currentBlockAdditional.data;\n }\n this.currentBlockAdditional = null;\n }\n ;\n break;\n case EBMLId.BlockAdditional:\n {\n if (!this.currentBlockAdditional)\n break;\n this.currentBlockAdditional.data = readBytes(slice, size);\n }\n ;\n break;\n case EBMLId.BlockAddID:\n {\n if (!this.currentBlockAdditional)\n break;\n this.currentBlockAdditional.addId = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.BlockDuration:\n {\n if (!this.currentBlock)\n break;\n this.currentBlock.duration = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.ReferenceBlock:\n {\n if (!this.currentBlock)\n break;\n this.currentBlock.isKeyFrame = false;\n // We ignore the actual value here, we just use the reference as an indicator for \"not a key frame\".\n // This is in line with FFmpeg's behavior.\n }\n ;\n break;\n case EBMLId.Tag:\n {\n this.currentTagTargetIsMovie = true;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.Targets:\n {\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.TargetTypeValue:\n {\n const targetTypeValue = readUnsignedInt(slice, size);\n if (targetTypeValue !== 50) {\n this.currentTagTargetIsMovie = false;\n }\n }\n ;\n break;\n case EBMLId.TagTrackUID:\n case EBMLId.TagEditionUID:\n case EBMLId.TagChapterUID:\n case EBMLId.TagAttachmentUID:\n {\n this.currentTagTargetIsMovie = false;\n }\n ;\n break;\n case EBMLId.SimpleTag:\n {\n if (!this.currentTagTargetIsMovie)\n break;\n this.currentSimpleTagName = null;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.TagName:\n {\n this.currentSimpleTagName = readUnicodeString(slice, size);\n }\n ;\n break;\n case EBMLId.TagString:\n {\n if (!this.currentSimpleTagName)\n 
break;\n const value = readUnicodeString(slice, size);\n this.processTagValue(this.currentSimpleTagName, value);\n }\n ;\n break;\n case EBMLId.TagBinary:\n {\n if (!this.currentSimpleTagName)\n break;\n const value = readBytes(slice, size);\n this.processTagValue(this.currentSimpleTagName, value);\n }\n ;\n break;\n case EBMLId.AttachedFile:\n {\n if (!this.currentSegment)\n break;\n this.currentAttachedFile = {\n fileUid: null,\n fileName: null,\n fileMediaType: null,\n fileData: null,\n fileDescription: null,\n };\n this.readContiguousElements(slice.slice(dataStartPos, size));\n const tags = this.currentSegment.metadataTags;\n if (this.currentAttachedFile.fileUid && this.currentAttachedFile.fileData) {\n // All attached files get surfaced in the `raw` metadata tags\n tags.raw ??= {};\n tags.raw[this.currentAttachedFile.fileUid.toString()] = new AttachedFile(this.currentAttachedFile.fileData, this.currentAttachedFile.fileMediaType ?? undefined, this.currentAttachedFile.fileName ?? undefined, this.currentAttachedFile.fileDescription ?? undefined);\n }\n // Only process image attachments\n if (this.currentAttachedFile.fileMediaType?.startsWith('image/') && this.currentAttachedFile.fileData) {\n const fileName = this.currentAttachedFile.fileName;\n let kind = 'unknown';\n if (fileName) {\n const lowerName = fileName.toLowerCase();\n if (lowerName.startsWith('cover.')) {\n kind = 'coverFront';\n }\n else if (lowerName.startsWith('back.')) {\n kind = 'coverBack';\n }\n }\n tags.images ??= [];\n tags.images.push({\n data: this.currentAttachedFile.fileData,\n mimeType: this.currentAttachedFile.fileMediaType,\n kind,\n name: this.currentAttachedFile.fileName ?? undefined,\n description: this.currentAttachedFile.fileDescription ?? 
undefined,\n });\n }\n this.currentAttachedFile = null;\n }\n ;\n break;\n case EBMLId.FileUID:\n {\n if (!this.currentAttachedFile)\n break;\n this.currentAttachedFile.fileUid = readUnsignedBigInt(slice, size);\n }\n ;\n break;\n case EBMLId.FileName:\n {\n if (!this.currentAttachedFile)\n break;\n this.currentAttachedFile.fileName = readUnicodeString(slice, size);\n }\n ;\n break;\n case EBMLId.FileMediaType:\n {\n if (!this.currentAttachedFile)\n break;\n this.currentAttachedFile.fileMediaType = readAsciiString(slice, size);\n }\n ;\n break;\n case EBMLId.FileData:\n {\n if (!this.currentAttachedFile)\n break;\n this.currentAttachedFile.fileData = readBytes(slice, size);\n }\n ;\n break;\n case EBMLId.FileDescription:\n {\n if (!this.currentAttachedFile)\n break;\n this.currentAttachedFile.fileDescription = readUnicodeString(slice, size);\n }\n ;\n break;\n case EBMLId.ContentEncodings:\n {\n if (!this.currentTrack)\n break;\n this.readContiguousElements(slice.slice(dataStartPos, size));\n // \"**MUST** start with the `ContentEncoding` with the highest `ContentEncodingOrder`\"\n this.currentTrack.decodingInstructions.sort((a, b) => b.order - a.order);\n }\n ;\n break;\n case EBMLId.ContentEncoding:\n {\n this.currentDecodingInstruction = {\n order: 0,\n scope: ContentEncodingScope.Block,\n data: null,\n };\n this.readContiguousElements(slice.slice(dataStartPos, size));\n if (this.currentDecodingInstruction.data) {\n this.currentTrack.decodingInstructions.push(this.currentDecodingInstruction);\n }\n this.currentDecodingInstruction = null;\n }\n ;\n break;\n case EBMLId.ContentEncodingOrder:\n {\n if (!this.currentDecodingInstruction)\n break;\n this.currentDecodingInstruction.order = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.ContentEncodingScope:\n {\n if (!this.currentDecodingInstruction)\n break;\n this.currentDecodingInstruction.scope = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.ContentCompression:\n {\n if 
(!this.currentDecodingInstruction)\n break;\n this.currentDecodingInstruction.data = {\n type: 'decompress',\n algorithm: ContentCompAlgo.Zlib,\n settings: null,\n };\n this.readContiguousElements(slice.slice(dataStartPos, size));\n }\n ;\n break;\n case EBMLId.ContentCompAlgo:\n {\n if (this.currentDecodingInstruction?.data?.type !== 'decompress')\n break;\n this.currentDecodingInstruction.data.algorithm = readUnsignedInt(slice, size);\n }\n ;\n break;\n case EBMLId.ContentCompSettings:\n {\n if (this.currentDecodingInstruction?.data?.type !== 'decompress')\n break;\n this.currentDecodingInstruction.data.settings = readBytes(slice, size);\n }\n ;\n break;\n case EBMLId.ContentEncryption:\n {\n if (!this.currentDecodingInstruction)\n break;\n this.currentDecodingInstruction.data = {\n type: 'decrypt',\n };\n }\n ;\n break;\n }\n slice.filePos = dataStartPos + size;\n return true;\n }\n decodeBlockData(track, rawData) {\n assert(track.decodingInstructions.length > 0); // This method shouldn't be called otherwise\n let currentData = rawData;\n for (const instruction of track.decodingInstructions) {\n assert(instruction.data);\n switch (instruction.data.type) {\n case 'decompress':\n {\n switch (instruction.data.algorithm) {\n case ContentCompAlgo.HeaderStripping:\n {\n if (instruction.data.settings && instruction.data.settings.length > 0) {\n const prefix = instruction.data.settings;\n const newData = new Uint8Array(prefix.length + currentData.length);\n newData.set(prefix, 0);\n newData.set(currentData, prefix.length);\n currentData = newData;\n }\n }\n ;\n break;\n default:\n {\n // Unhandled\n }\n ;\n }\n }\n ;\n break;\n default:\n {\n // Unhandled\n }\n ;\n }\n }\n return currentData;\n }\n processTagValue(name, value) {\n if (!this.currentSegment?.metadataTags)\n return;\n const metadataTags = this.currentSegment.metadataTags;\n metadataTags.raw ??= {};\n metadataTags.raw[name] ??= value;\n if (typeof value === 'string') {\n switch (name.toLowerCase()) {\n case 
'title':\n {\n metadataTags.title ??= value;\n }\n ;\n break;\n case 'description':\n {\n metadataTags.description ??= value;\n }\n ;\n break;\n case 'artist':\n {\n metadataTags.artist ??= value;\n }\n ;\n break;\n case 'album':\n {\n metadataTags.album ??= value;\n }\n ;\n break;\n case 'album_artist':\n {\n metadataTags.albumArtist ??= value;\n }\n ;\n break;\n case 'genre':\n {\n metadataTags.genre ??= value;\n }\n ;\n break;\n case 'comment':\n {\n metadataTags.comment ??= value;\n }\n ;\n break;\n case 'lyrics':\n {\n metadataTags.lyrics ??= value;\n }\n ;\n break;\n case 'date':\n {\n const date = new Date(value);\n if (!Number.isNaN(date.getTime())) {\n metadataTags.date ??= date;\n }\n }\n ;\n break;\n case 'track_number':\n case 'part_number':\n {\n const parts = value.split('/');\n const trackNum = Number.parseInt(parts[0], 10);\n const tracksTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(trackNum) && trackNum > 0) {\n metadataTags.trackNumber ??= trackNum;\n }\n if (tracksTotal && Number.isInteger(tracksTotal) && tracksTotal > 0) {\n metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n ;\n break;\n case 'disc_number':\n case 'disc':\n {\n const discParts = value.split('/');\n const discNum = Number.parseInt(discParts[0], 10);\n const discsTotal = discParts[1] && Number.parseInt(discParts[1], 10);\n if (Number.isInteger(discNum) && discNum > 0) {\n metadataTags.discNumber ??= discNum;\n }\n if (discsTotal && Number.isInteger(discsTotal) && discsTotal > 0) {\n metadataTags.discsTotal ??= discsTotal;\n }\n }\n ;\n break;\n }\n }\n }\n}\nclass MatroskaTrackBacking {\n constructor(internalTrack) {\n this.internalTrack = internalTrack;\n this.packetToClusterLocation = new WeakMap();\n }\n getId() {\n return this.internalTrack.id;\n }\n getNumber() {\n const demuxer = this.internalTrack.demuxer;\n const inputTrack = this.internalTrack.inputTrack;\n const trackType = inputTrack.type;\n let number = 0;\n for (const segment of 
demuxer.segments) {\n for (const track of segment.tracks) {\n if (track.inputTrack.type === trackType) {\n number++;\n }\n if (track === this.internalTrack) {\n break;\n }\n }\n }\n return number;\n }\n getCodec() {\n throw new Error('Not implemented on base class.');\n }\n getInternalCodecId() {\n return this.internalTrack.codecId;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n getName() {\n return this.internalTrack.name;\n }\n getLanguageCode() {\n return this.internalTrack.languageCode;\n }\n async getFirstTimestamp() {\n const firstPacket = await this.getFirstPacket({ metadataOnly: true });\n return firstPacket?.timestamp ?? 0;\n }\n getTimeResolution() {\n return this.internalTrack.segment.timestampFactor;\n }\n getDisposition() {\n return this.internalTrack.disposition;\n }\n async getFirstPacket(options) {\n return this.performClusterLookup(null, (cluster) => {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n if (trackData) {\n return {\n blockIndex: 0,\n correctBlockFound: true,\n };\n }\n return {\n blockIndex: -1,\n correctBlockFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the cues\n Infinity, options);\n }\n intoTimescale(timestamp) {\n // Do a little rounding to catch cases where the result is very close to an integer. If it is, it's likely\n // that the number was originally an integer divided by the timescale. 
For stability, it's best\n // to return the integer in this case.\n return roundIfAlmostInteger(timestamp * this.internalTrack.segment.timestampFactor);\n }\n async getPacket(timestamp, options) {\n const timestampInTimescale = this.intoTimescale(timestamp);\n return this.performClusterLookup(null, (cluster) => {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n if (!trackData) {\n return { blockIndex: -1, correctBlockFound: false };\n }\n const index = binarySearchLessOrEqual(trackData.presentationTimestamps, timestampInTimescale, x => x.timestamp);\n const blockIndex = index !== -1 ? trackData.presentationTimestamps[index].blockIndex : -1;\n const correctBlockFound = index !== -1 && timestampInTimescale < trackData.endTimestamp;\n return { blockIndex, correctBlockFound };\n }, timestampInTimescale, timestampInTimescale, options);\n }\n async getNextPacket(packet, options) {\n const locationInCluster = this.packetToClusterLocation.get(packet);\n if (locationInCluster === undefined) {\n throw new Error('Packet was not created from this track.');\n }\n return this.performClusterLookup(locationInCluster.cluster, (cluster) => {\n if (cluster === locationInCluster.cluster) {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n if (locationInCluster.blockIndex + 1 < trackData.blocks.length) {\n // We can simply take the next block in the cluster\n return {\n blockIndex: locationInCluster.blockIndex + 1,\n correctBlockFound: true,\n };\n }\n }\n else {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n if (trackData) {\n return {\n blockIndex: 0,\n correctBlockFound: true,\n };\n }\n }\n return {\n blockIndex: -1,\n correctBlockFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the cues\n Infinity, options);\n }\n async getKeyPacket(timestamp, options) {\n const timestampInTimescale = this.intoTimescale(timestamp);\n return this.performClusterLookup(null, (cluster) => {\n const 
trackData = cluster.trackData.get(this.internalTrack.id);\n if (!trackData) {\n return { blockIndex: -1, correctBlockFound: false };\n }\n const index = findLastIndex(trackData.presentationTimestamps, (x) => {\n const block = trackData.blocks[x.blockIndex];\n return block.isKeyFrame && x.timestamp <= timestampInTimescale;\n });\n const blockIndex = index !== -1 ? trackData.presentationTimestamps[index].blockIndex : -1;\n const correctBlockFound = index !== -1 && timestampInTimescale < trackData.endTimestamp;\n return { blockIndex, correctBlockFound };\n }, timestampInTimescale, timestampInTimescale, options);\n }\n async getNextKeyPacket(packet, options) {\n const locationInCluster = this.packetToClusterLocation.get(packet);\n if (locationInCluster === undefined) {\n throw new Error('Packet was not created from this track.');\n }\n return this.performClusterLookup(locationInCluster.cluster, (cluster) => {\n if (cluster === locationInCluster.cluster) {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n const nextKeyFrameIndex = trackData.blocks.findIndex((x, i) => x.isKeyFrame && i > locationInCluster.blockIndex);\n if (nextKeyFrameIndex !== -1) {\n // We can simply take the next key frame in the cluster\n return {\n blockIndex: nextKeyFrameIndex,\n correctBlockFound: true,\n };\n }\n }\n else {\n const trackData = cluster.trackData.get(this.internalTrack.id);\n if (trackData && trackData.firstKeyFrameTimestamp !== null) {\n const keyFrameIndex = trackData.blocks.findIndex(x => x.isKeyFrame);\n assert(keyFrameIndex !== -1); // There must be one\n return {\n blockIndex: keyFrameIndex,\n correctBlockFound: true,\n };\n }\n }\n return {\n blockIndex: -1,\n correctBlockFound: false,\n };\n }, -Infinity, // Use -Infinity as a search timestamp to avoid using the cues\n Infinity, options);\n }\n async fetchPacketInCluster(cluster, blockIndex, options) {\n if (blockIndex === -1) {\n return null;\n }\n const trackData = 
cluster.trackData.get(this.internalTrack.id);\n const block = trackData.blocks[blockIndex];\n assert(block);\n // Perform lazy decoding if needed\n if (!block.decoded) {\n block.data = this.internalTrack.demuxer.decodeBlockData(this.internalTrack, block.data);\n block.decoded = true;\n }\n const data = options.metadataOnly ? PLACEHOLDER_DATA : block.data;\n const timestamp = block.timestamp / this.internalTrack.segment.timestampFactor;\n const duration = block.duration / this.internalTrack.segment.timestampFactor;\n const sideData = {};\n if (block.mainAdditional && this.internalTrack.info?.type === 'video' && this.internalTrack.info.alphaMode) {\n sideData.alpha = options.metadataOnly ? PLACEHOLDER_DATA : block.mainAdditional;\n sideData.alphaByteLength = block.mainAdditional.byteLength;\n }\n const packet = new EncodedPacket(data, block.isKeyFrame ? 'key' : 'delta', timestamp, duration, cluster.dataStartPos + blockIndex, block.data.byteLength, sideData);\n this.packetToClusterLocation.set(packet, { cluster, blockIndex });\n return packet;\n }\n /** Looks for a packet in the clusters while trying to load as few clusters as possible to retrieve it. 
*/\n async performClusterLookup(\n // The cluster where we start looking\n startCluster, \n // This function returns the best-matching block in a given cluster\n getMatchInCluster, \n // The timestamp with which we can search the lookup table\n searchTimestamp, \n // The timestamp for which we know the correct block will not come after it\n latestTimestamp, options) {\n const { demuxer, segment } = this.internalTrack;\n let currentCluster = null;\n let bestCluster = null;\n let bestBlockIndex = -1;\n if (startCluster) {\n const { blockIndex, correctBlockFound } = getMatchInCluster(startCluster);\n if (correctBlockFound) {\n return this.fetchPacketInCluster(startCluster, blockIndex, options);\n }\n if (blockIndex !== -1) {\n bestCluster = startCluster;\n bestBlockIndex = blockIndex;\n }\n }\n // Search for a cue point; this way, we won't need to start searching from the start of the file\n // but can jump right into the correct cluster (or at least nearby).\n const cuePointIndex = binarySearchLessOrEqual(this.internalTrack.cuePoints, searchTimestamp, x => x.time);\n const cuePoint = cuePointIndex !== -1\n ? this.internalTrack.cuePoints[cuePointIndex]\n : null;\n // Also check the position cache\n const positionCacheIndex = binarySearchLessOrEqual(this.internalTrack.clusterPositionCache, searchTimestamp, x => x.startTimestamp);\n const positionCacheEntry = positionCacheIndex !== -1\n ? this.internalTrack.clusterPositionCache[positionCacheIndex]\n : null;\n const lookupEntryPosition = Math.max(cuePoint?.clusterPosition ?? 0, positionCacheEntry?.elementStartPos ?? 0) || null;\n let currentPos;\n if (!startCluster) {\n currentPos = lookupEntryPosition ?? 
segment.clusterSeekStartPos;\n }\n else {\n if (lookupEntryPosition === null || startCluster.elementStartPos >= lookupEntryPosition) {\n currentPos = startCluster.elementEndPos;\n currentCluster = startCluster;\n }\n else {\n // Use the lookup entry\n currentPos = lookupEntryPosition;\n }\n }\n while (segment.elementEndPos === null || currentPos <= segment.elementEndPos - MIN_HEADER_SIZE) {\n if (currentCluster) {\n const trackData = currentCluster.trackData.get(this.internalTrack.id);\n if (trackData && trackData.startTimestamp > latestTimestamp) {\n // We're already past the upper bound, no need to keep searching\n break;\n }\n }\n // Load the header\n let slice = demuxer.reader.requestSliceRange(currentPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const elementStartPos = currentPos;\n const elementHeader = readElementHeader(slice);\n if (!elementHeader\n || (!LEVEL_1_EBML_IDS.includes(elementHeader.id) && elementHeader.id !== EBMLId.Void)) {\n // There's an element here that shouldn't be here. Might be garbage. In this case, let's\n // try and resync to the next valid element.\n const nextPos = await resync(demuxer.reader, elementStartPos, LEVEL_1_EBML_IDS, Math.min(segment.elementEndPos ?? 
Infinity, elementStartPos + MAX_RESYNC_LENGTH));\n if (nextPos) {\n currentPos = nextPos;\n continue;\n }\n else {\n break; // Resync failed\n }\n }\n const id = elementHeader.id;\n let size = elementHeader.size;\n const dataStartPos = slice.filePos;\n if (id === EBMLId.Cluster) {\n currentCluster = await demuxer.readCluster(elementStartPos, segment);\n // readCluster computes the proper size even if it's undefined in the header, so let's use that instead\n size = currentCluster.elementEndPos - dataStartPos;\n const { blockIndex, correctBlockFound } = getMatchInCluster(currentCluster);\n if (correctBlockFound) {\n return this.fetchPacketInCluster(currentCluster, blockIndex, options);\n }\n if (blockIndex !== -1) {\n bestCluster = currentCluster;\n bestBlockIndex = blockIndex;\n }\n }\n if (size === undefined) {\n // Undefined element size (can happen in livestreamed files). In this case, we need to do some\n // searching to determine the actual size of the element.\n assert(id !== EBMLId.Cluster); // Undefined cluster sizes are fixed further up\n // Search for the next element at level 0 or 1\n const nextElementPos = await searchForNextElementId(demuxer.reader, dataStartPos, LEVEL_0_AND_1_EBML_IDS, segment.elementEndPos);\n size = nextElementPos.pos - dataStartPos;\n }\n const endPos = dataStartPos + size;\n if (segment.elementEndPos === null) {\n // Check the next element. If it's a new segment, we know this segment ends here. 
The new\n // segment is just ignored, since we're likely in a livestreamed file and thus only care about\n // the first segment.\n let slice = demuxer.reader.requestSliceRange(endPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const elementId = readElementId(slice);\n if (elementId === EBMLId.Segment) {\n segment.elementEndPos = endPos; // We now know the segment's size\n break;\n }\n }\n currentPos = endPos;\n }\n // Catch faulty cue points\n if (cuePoint && (!bestCluster || bestCluster.elementStartPos < cuePoint.clusterPosition)) {\n // The cue point lied to us! We found a cue point but no cluster there that satisfied the match. In this\n // case, let's search again but using the cue point before that.\n const previousCuePoint = this.internalTrack.cuePoints[cuePointIndex - 1];\n assert(!previousCuePoint || previousCuePoint.time < cuePoint.time);\n const newSearchTimestamp = previousCuePoint?.time ?? -Infinity;\n return this.performClusterLookup(null, getMatchInCluster, newSearchTimestamp, latestTimestamp, options);\n }\n if (bestCluster) {\n // If we finished looping but didn't find a perfect match, still return the best match we found\n return this.fetchPacketInCluster(bestCluster, bestBlockIndex, options);\n }\n return null;\n }\n}\nclass MatroskaVideoTrackBacking extends MatroskaTrackBacking {\n constructor(internalTrack) {\n super(internalTrack);\n this.decoderConfigPromise = null;\n this.internalTrack = internalTrack;\n }\n getCodec() {\n return this.internalTrack.info.codec;\n }\n getCodedWidth() {\n return this.internalTrack.info.width;\n }\n getCodedHeight() {\n return this.internalTrack.info.height;\n }\n getSquarePixelWidth() {\n return this.internalTrack.info.squarePixelWidth;\n }\n getSquarePixelHeight() {\n return this.internalTrack.info.squarePixelHeight;\n }\n getRotation() {\n return this.internalTrack.info.rotation;\n }\n async getColorSpace() {\n return {\n primaries: 
this.internalTrack.info.colorSpace?.primaries,\n transfer: this.internalTrack.info.colorSpace?.transfer,\n matrix: this.internalTrack.info.colorSpace?.matrix,\n fullRange: this.internalTrack.info.colorSpace?.fullRange,\n };\n }\n async canBeTransparent() {\n return this.internalTrack.info.alphaMode;\n }\n async getDecoderConfig() {\n if (!this.internalTrack.info.codec) {\n return null;\n }\n return this.decoderConfigPromise ??= (async () => {\n let firstPacket = null;\n const needsPacketForAdditionalInfo = this.internalTrack.info.codec === 'vp9'\n || this.internalTrack.info.codec === 'av1'\n // Packets are in Annex B format:\n || (this.internalTrack.info.codec === 'avc' && !this.internalTrack.info.codecDescription)\n // Packets are in Annex B format:\n || (this.internalTrack.info.codec === 'hevc' && !this.internalTrack.info.codecDescription);\n if (needsPacketForAdditionalInfo) {\n firstPacket = await this.getFirstPacket({});\n }\n return {\n codec: extractVideoCodecString({\n width: this.internalTrack.info.width,\n height: this.internalTrack.info.height,\n codec: this.internalTrack.info.codec,\n codecDescription: this.internalTrack.info.codecDescription,\n colorSpace: this.internalTrack.info.colorSpace,\n avcType: 1, // We don't know better (or do we?) so just assume 'avc1'\n avcCodecInfo: this.internalTrack.info.codec === 'avc' && firstPacket\n ? extractAvcDecoderConfigurationRecord(firstPacket.data)\n : null,\n hevcCodecInfo: this.internalTrack.info.codec === 'hevc' && firstPacket\n ? extractHevcDecoderConfigurationRecord(firstPacket.data)\n : null,\n vp9CodecInfo: this.internalTrack.info.codec === 'vp9' && firstPacket\n ? extractVp9CodecInfoFromPacket(firstPacket.data)\n : null,\n av1CodecInfo: this.internalTrack.info.codec === 'av1' && firstPacket\n ? 
extractAv1CodecInfoFromPacket(firstPacket.data)\n : null,\n }),\n codedWidth: this.internalTrack.info.width,\n codedHeight: this.internalTrack.info.height,\n displayAspectWidth: this.internalTrack.info.squarePixelWidth,\n displayAspectHeight: this.internalTrack.info.squarePixelHeight,\n description: this.internalTrack.info.codecDescription ?? undefined,\n colorSpace: this.internalTrack.info.colorSpace ?? undefined,\n };\n })();\n }\n}\nclass MatroskaAudioTrackBacking extends MatroskaTrackBacking {\n constructor(internalTrack) {\n super(internalTrack);\n this.decoderConfig = null;\n this.internalTrack = internalTrack;\n }\n getCodec() {\n return this.internalTrack.info.codec;\n }\n getNumberOfChannels() {\n return this.internalTrack.info.numberOfChannels;\n }\n getSampleRate() {\n return this.internalTrack.info.sampleRate;\n }\n async getDecoderConfig() {\n if (!this.internalTrack.info.codec) {\n return null;\n }\n return this.decoderConfig ??= {\n codec: extractAudioCodecString({\n codec: this.internalTrack.info.codec,\n codecDescription: this.internalTrack.info.codecDescription,\n aacCodecInfo: this.internalTrack.info.aacCodecInfo,\n }),\n numberOfChannels: this.internalTrack.info.numberOfChannels,\n sampleRate: this.internalTrack.info.sampleRate,\n description: this.internalTrack.info.codecDescription ?? undefined,\n };\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { FRAME_HEADER_SIZE, readMp3FrameHeader } from '../../shared/mp3-misc.js';\nimport { readU32Be } from '../reader.js';\nexport const readNextMp3FrameHeader = async (reader, startPos, until) => {\n let currentPos = startPos;\n while (until === null || currentPos < until) {\n let slice = reader.requestSlice(currentPos, FRAME_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const word = readU32Be(slice);\n const result = readMp3FrameHeader(word, reader.fileSize !== null ? reader.fileSize - currentPos : null);\n if (result.header) {\n return { header: result.header, startPos: currentPos };\n }\n currentPos += result.bytesAdvanced;\n }\n return null;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack } from '../input-track.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, AsyncMutex, binarySearchExact, binarySearchLessOrEqual, UNDETERMINED_LANGUAGE } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { getXingOffset, INFO, XING } from '../../shared/mp3-misc.js';\nimport { ID3_V1_TAG_SIZE, ID3_V2_HEADER_SIZE, parseId3V1Tag, parseId3V2Tag, readId3V2Header, } from '../id3.js';\nimport { readNextMp3FrameHeader } from './mp3-reader.js';\nimport { readAscii, readBytes, readU32Be } from '../reader.js';\nexport class Mp3Demuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.metadataPromise = null;\n this.firstFrameHeader = null;\n this.loadedSamples = []; // All samples from the start of the file to lastLoadedPos\n 
this.metadataTags = null;\n this.tracks = [];\n this.readingMutex = new AsyncMutex();\n this.lastSampleLoaded = false;\n this.lastLoadedPos = 0;\n this.nextTimestampInSamples = 0;\n this.reader = input._reader;\n }\n async readMetadata() {\n return this.metadataPromise ??= (async () => {\n // Keep loading until we find the first frame header\n while (!this.firstFrameHeader && !this.lastSampleLoaded) {\n await this.advanceReader();\n }\n if (!this.firstFrameHeader) {\n throw new Error('No valid MP3 frame found.');\n }\n this.tracks = [new InputAudioTrack(this.input, new Mp3AudioTrackBacking(this))];\n })();\n }\n async advanceReader() {\n if (this.lastLoadedPos === 0) {\n // Let's skip all ID3v2 tags at the start of the file\n while (true) {\n let slice = this.reader.requestSlice(this.lastLoadedPos, ID3_V2_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n this.lastSampleLoaded = true;\n return;\n }\n const id3V2Header = readId3V2Header(slice);\n if (!id3V2Header) {\n break;\n }\n this.lastLoadedPos = slice.filePos + id3V2Header.size;\n }\n }\n const result = await readNextMp3FrameHeader(this.reader, this.lastLoadedPos, this.reader.fileSize);\n if (!result) {\n this.lastSampleLoaded = true;\n return;\n }\n const header = result.header;\n this.lastLoadedPos = result.startPos + header.totalSize - 1; // -1 in case the frame is 1 byte too short\n const xingOffset = getXingOffset(header.mpegVersionId, header.channel);\n let slice = this.reader.requestSlice(result.startPos + xingOffset, 4);\n if (slice instanceof Promise)\n slice = await slice;\n if (slice) {\n const word = readU32Be(slice);\n const isXing = word === XING || word === INFO;\n if (isXing) {\n // There's no actual audio data in this frame, so let's skip it\n return;\n }\n }\n if (!this.firstFrameHeader) {\n this.firstFrameHeader = header;\n }\n if (header.sampleRate !== this.firstFrameHeader.sampleRate) {\n console.warn(`MP3 changed sample rate mid-file: 
${this.firstFrameHeader.sampleRate} Hz to ${header.sampleRate} Hz.`\n + ` Might be a bug, so please report this file.`);\n }\n const sampleDuration = header.audioSamplesInFrame / this.firstFrameHeader.sampleRate;\n const sample = {\n timestamp: this.nextTimestampInSamples / this.firstFrameHeader.sampleRate,\n duration: sampleDuration,\n dataStart: result.startPos,\n dataSize: header.totalSize,\n };\n this.loadedSamples.push(sample);\n this.nextTimestampInSamples += header.audioSamplesInFrame;\n return;\n }\n async getMimeType() {\n return 'audio/mpeg';\n }\n async getTracks() {\n await this.readMetadata();\n return this.tracks;\n }\n async computeDuration() {\n await this.readMetadata();\n const track = this.tracks[0];\n assert(track);\n return track.computeDuration();\n }\n async getMetadataTags() {\n const release = await this.readingMutex.acquire();\n try {\n await this.readMetadata();\n if (this.metadataTags) {\n return this.metadataTags;\n }\n this.metadataTags = {};\n let currentPos = 0;\n let id3V2HeaderFound = false;\n while (true) {\n let headerSlice = this.reader.requestSlice(currentPos, ID3_V2_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n if (!headerSlice)\n break;\n const id3V2Header = readId3V2Header(headerSlice);\n if (!id3V2Header) {\n break;\n }\n id3V2HeaderFound = true;\n let contentSlice = this.reader.requestSlice(headerSlice.filePos, id3V2Header.size);\n if (contentSlice instanceof Promise)\n contentSlice = await contentSlice;\n if (!contentSlice)\n break;\n parseId3V2Tag(contentSlice, id3V2Header, this.metadataTags);\n currentPos = headerSlice.filePos + id3V2Header.size;\n }\n if (!id3V2HeaderFound && this.reader.fileSize !== null && this.reader.fileSize >= ID3_V1_TAG_SIZE) {\n // Try reading an ID3v1 tag at the end of the file\n let slice = this.reader.requestSlice(this.reader.fileSize - ID3_V1_TAG_SIZE, ID3_V1_TAG_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n const 
tag = readAscii(slice, 3);\n if (tag === 'TAG') {\n parseId3V1Tag(slice, this.metadataTags);\n }\n }\n return this.metadataTags;\n }\n finally {\n release();\n }\n }\n}\nclass Mp3AudioTrackBacking {\n constructor(demuxer) {\n this.demuxer = demuxer;\n }\n getId() {\n return 1;\n }\n getNumber() {\n return 1;\n }\n async getFirstTimestamp() {\n return 0;\n }\n getTimeResolution() {\n assert(this.demuxer.firstFrameHeader);\n return this.demuxer.firstFrameHeader.sampleRate / this.demuxer.firstFrameHeader.audioSamplesInFrame;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getCodec() {\n return 'mp3';\n }\n getInternalCodecId() {\n return null;\n }\n getNumberOfChannels() {\n assert(this.demuxer.firstFrameHeader);\n return this.demuxer.firstFrameHeader.channel === 3 ? 1 : 2;\n }\n getSampleRate() {\n assert(this.demuxer.firstFrameHeader);\n return this.demuxer.firstFrameHeader.sampleRate;\n }\n getDisposition() {\n return {\n ...DEFAULT_TRACK_DISPOSITION,\n };\n }\n async getDecoderConfig() {\n assert(this.demuxer.firstFrameHeader);\n return {\n codec: 'mp3',\n numberOfChannels: this.demuxer.firstFrameHeader.channel === 3 ? 
1 : 2,\n sampleRate: this.demuxer.firstFrameHeader.sampleRate,\n };\n }\n async getPacketAtIndex(sampleIndex, options) {\n if (sampleIndex === -1) {\n return null;\n }\n const rawSample = this.demuxer.loadedSamples[sampleIndex];\n if (!rawSample) {\n return null;\n }\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.demuxer.reader.requestSlice(rawSample.dataStart, rawSample.dataSize);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null; // Data didn't fit into the rest of the file\n }\n data = readBytes(slice, rawSample.dataSize);\n }\n return new EncodedPacket(data, 'key', rawSample.timestamp, rawSample.duration, sampleIndex, rawSample.dataSize);\n }\n getFirstPacket(options) {\n return this.getPacketAtIndex(0, options);\n }\n async getNextPacket(packet, options) {\n const release = await this.demuxer.readingMutex.acquire();\n try {\n const sampleIndex = binarySearchExact(this.demuxer.loadedSamples, packet.timestamp, x => x.timestamp);\n if (sampleIndex === -1) {\n throw new Error('Packet was not created from this track.');\n }\n const nextIndex = sampleIndex + 1;\n // Ensure the next sample exists\n while (nextIndex >= this.demuxer.loadedSamples.length\n && !this.demuxer.lastSampleLoaded) {\n await this.demuxer.advanceReader();\n }\n return this.getPacketAtIndex(nextIndex, options);\n }\n finally {\n release();\n }\n }\n async getPacket(timestamp, options) {\n const release = await this.demuxer.readingMutex.acquire();\n try {\n while (true) {\n const index = binarySearchLessOrEqual(this.demuxer.loadedSamples, timestamp, x => x.timestamp);\n if (index === -1 && this.demuxer.loadedSamples.length > 0) {\n // We're before the first sample\n return null;\n }\n if (this.demuxer.lastSampleLoaded) {\n // All data is loaded, return what we found\n return this.getPacketAtIndex(index, options);\n }\n if (index >= 0 && index + 1 < this.demuxer.loadedSamples.length) {\n // The next packet also 
exists, we're done\n return this.getPacketAtIndex(index, options);\n }\n // Otherwise, keep loading data\n await this.demuxer.advanceReader();\n }\n }\n finally {\n release();\n }\n }\n getKeyPacket(timestamp, options) {\n return this.getPacket(timestamp, options);\n }\n getNextKeyPacket(packet, options) {\n return this.getNextPacket(packet, options);\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { OPUS_SAMPLE_RATE } from '../codec.js';\nimport { parseModesFromVorbisSetupPacket, parseOpusIdentificationHeader, readVorbisComments } from '../codec-data.js';\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack } from '../input-track.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, AsyncMutex, binarySearchLessOrEqual, findLast, last, roundIfAlmostInteger, toDataView, UNDETERMINED_LANGUAGE, } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { readBytes } from '../reader.js';\nimport { buildOggMimeType, computeOggPageCrc, extractSampleMetadata } from './ogg-misc.js';\nimport { findNextPageHeader, MAX_PAGE_HEADER_SIZE, MAX_PAGE_SIZE, MIN_PAGE_HEADER_SIZE, readPageHeader, } from './ogg-reader.js';\nexport class OggDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.metadataPromise = null;\n this.bitstreams = [];\n this.tracks = [];\n this.metadataTags = {};\n this.reader = input._reader;\n }\n async readMetadata() {\n return this.metadataPromise ??= (async () => {\n let currentPos = 0;\n while (true) {\n let slice = this.reader.requestSliceRange(currentPos, MIN_PAGE_HEADER_SIZE, MAX_PAGE_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const page = readPageHeader(slice);\n 
if (!page) {\n break;\n }\n const isBos = !!(page.headerType & 0x02);\n if (!isBos) {\n // All bos pages for all bitstreams are required to be at the start, so if the page is not bos then\n // we know we've seen all bitstreams (minus chaining)\n break;\n }\n this.bitstreams.push({\n serialNumber: page.serialNumber,\n bosPage: page,\n description: null,\n numberOfChannels: -1,\n sampleRate: -1,\n codecInfo: {\n codec: null,\n vorbisInfo: null,\n opusInfo: null,\n },\n lastMetadataPacket: null,\n });\n currentPos = page.headerStartPos + page.totalSize;\n }\n for (const bitstream of this.bitstreams) {\n const firstPacket = await this.readPacket(bitstream.bosPage, 0);\n if (!firstPacket) {\n continue;\n }\n if (\n // Check for Vorbis\n firstPacket.data.byteLength >= 7\n && firstPacket.data[0] === 0x01 // Packet type 1 = identification header\n && firstPacket.data[1] === 0x76 // 'v'\n && firstPacket.data[2] === 0x6f // 'o'\n && firstPacket.data[3] === 0x72 // 'r'\n && firstPacket.data[4] === 0x62 // 'b'\n && firstPacket.data[5] === 0x69 // 'i'\n && firstPacket.data[6] === 0x73 // 's'\n ) {\n await this.readVorbisMetadata(firstPacket, bitstream);\n }\n else if (\n // Check for Opus\n firstPacket.data.byteLength >= 8\n && firstPacket.data[0] === 0x4f // 'O'\n && firstPacket.data[1] === 0x70 // 'p'\n && firstPacket.data[2] === 0x75 // 'u'\n && firstPacket.data[3] === 0x73 // 's'\n && firstPacket.data[4] === 0x48 // 'H'\n && firstPacket.data[5] === 0x65 // 'e'\n && firstPacket.data[6] === 0x61 // 'a'\n && firstPacket.data[7] === 0x64 // 'd'\n ) {\n await this.readOpusMetadata(firstPacket, bitstream);\n }\n if (bitstream.codecInfo.codec !== null) {\n this.tracks.push(new InputAudioTrack(this.input, new OggAudioTrackBacking(bitstream, this)));\n }\n }\n })();\n }\n async readVorbisMetadata(firstPacket, bitstream) {\n let nextPacketPosition = await this.findNextPacketStart(firstPacket);\n if (!nextPacketPosition) {\n return;\n }\n const secondPacket = await 
this.readPacket(nextPacketPosition.startPage, nextPacketPosition.startSegmentIndex);\n if (!secondPacket) {\n return;\n }\n nextPacketPosition = await this.findNextPacketStart(secondPacket);\n if (!nextPacketPosition) {\n return;\n }\n const thirdPacket = await this.readPacket(nextPacketPosition.startPage, nextPacketPosition.startSegmentIndex);\n if (!thirdPacket) {\n return;\n }\n if (secondPacket.data[0] !== 0x03 || thirdPacket.data[0] !== 0x05) {\n return;\n }\n const lacingValues = [];\n const addBytesToSegmentTable = (bytes) => {\n while (true) {\n lacingValues.push(Math.min(255, bytes));\n if (bytes < 255) {\n break;\n }\n bytes -= 255;\n }\n };\n addBytesToSegmentTable(firstPacket.data.length);\n addBytesToSegmentTable(secondPacket.data.length);\n // We don't add the last packet to the segment table, as it is assumed to be whatever bytes remain\n const description = new Uint8Array(1 + lacingValues.length\n + firstPacket.data.length + secondPacket.data.length + thirdPacket.data.length);\n description[0] = 2; // Num entries in the segment table\n description.set(lacingValues, 1);\n description.set(firstPacket.data, 1 + lacingValues.length);\n description.set(secondPacket.data, 1 + lacingValues.length + firstPacket.data.length);\n description.set(thirdPacket.data, 1 + lacingValues.length + firstPacket.data.length + secondPacket.data.length);\n bitstream.codecInfo.codec = 'vorbis';\n bitstream.description = description;\n bitstream.lastMetadataPacket = thirdPacket;\n const view = toDataView(firstPacket.data);\n bitstream.numberOfChannels = view.getUint8(11);\n bitstream.sampleRate = view.getUint32(12, true);\n const blockSizeByte = view.getUint8(28);\n bitstream.codecInfo.vorbisInfo = {\n blocksizes: [\n 1 << (blockSizeByte & 0xf),\n 1 << (blockSizeByte >> 4),\n ],\n modeBlockflags: parseModesFromVorbisSetupPacket(thirdPacket.data).modeBlockflags,\n };\n readVorbisComments(secondPacket.data.subarray(7), this.metadataTags); // Skip header type and 'vorbis'\n }\n 
async readOpusMetadata(firstPacket, bitstream) {\n // From https://datatracker.ietf.org/doc/html/rfc7845#section-5:\n // \"An Ogg Opus logical stream contains exactly two mandatory header packets: an identification header and a\n // comment header.\"\n const nextPacketPosition = await this.findNextPacketStart(firstPacket);\n if (!nextPacketPosition) {\n return;\n }\n const secondPacket = await this.readPacket(nextPacketPosition.startPage, nextPacketPosition.startSegmentIndex);\n if (!secondPacket) {\n return;\n }\n bitstream.codecInfo.codec = 'opus';\n bitstream.description = firstPacket.data;\n bitstream.lastMetadataPacket = secondPacket;\n const header = parseOpusIdentificationHeader(firstPacket.data);\n bitstream.numberOfChannels = header.outputChannelCount;\n bitstream.sampleRate = OPUS_SAMPLE_RATE; // Always the same\n bitstream.codecInfo.opusInfo = {\n preSkip: header.preSkip,\n };\n readVorbisComments(secondPacket.data.subarray(8), this.metadataTags); // Skip 'OpusTags'\n }\n async readPacket(startPage, startSegmentIndex) {\n assert(startSegmentIndex < startPage.lacingValues.length);\n let startDataOffset = 0;\n for (let i = 0; i < startSegmentIndex; i++) {\n startDataOffset += startPage.lacingValues[i];\n }\n let currentPage = startPage;\n let currentDataOffset = startDataOffset;\n let currentSegmentIndex = startSegmentIndex;\n const chunks = [];\n outer: while (true) {\n // Load the entire page data\n let pageSlice = this.reader.requestSlice(currentPage.dataStartPos, currentPage.dataSize);\n if (pageSlice instanceof Promise)\n pageSlice = await pageSlice;\n assert(pageSlice);\n const pageData = readBytes(pageSlice, currentPage.dataSize);\n while (true) {\n if (currentSegmentIndex === currentPage.lacingValues.length) {\n chunks.push(pageData.subarray(startDataOffset, currentDataOffset));\n break;\n }\n const lacingValue = currentPage.lacingValues[currentSegmentIndex];\n currentDataOffset += lacingValue;\n if (lacingValue < 255) {\n 
chunks.push(pageData.subarray(startDataOffset, currentDataOffset));\n break outer;\n }\n currentSegmentIndex++;\n }\n // The packet extends to the next page; let's find it\n let currentPos = currentPage.headerStartPos + currentPage.totalSize;\n while (true) {\n let headerSlice = this.reader.requestSliceRange(currentPos, MIN_PAGE_HEADER_SIZE, MAX_PAGE_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n if (!headerSlice) {\n return null;\n }\n const nextPage = readPageHeader(headerSlice);\n if (!nextPage) {\n return null;\n }\n currentPage = nextPage;\n if (currentPage.serialNumber === startPage.serialNumber) {\n break;\n }\n currentPos = currentPage.headerStartPos + currentPage.totalSize;\n }\n startDataOffset = 0;\n currentDataOffset = 0;\n currentSegmentIndex = 0;\n }\n const totalPacketSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);\n if (totalPacketSize === 0) {\n return null; // Invalid packet, treat it as end of stream\n }\n const packetData = new Uint8Array(totalPacketSize);\n let offset = 0;\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i];\n packetData.set(chunk, offset);\n offset += chunk.length;\n }\n return {\n data: packetData,\n endPage: currentPage,\n endSegmentIndex: currentSegmentIndex,\n };\n }\n async findNextPacketStart(lastPacket) {\n // If there's another segment in the same page, return it\n if (lastPacket.endSegmentIndex < lastPacket.endPage.lacingValues.length - 1) {\n return { startPage: lastPacket.endPage, startSegmentIndex: lastPacket.endSegmentIndex + 1 };\n }\n const isEos = !!(lastPacket.endPage.headerType & 0x04);\n if (isEos) {\n // The page is marked as the last page of the logical bitstream, so we won't find anything beyond it\n return null;\n }\n // Otherwise, search for the next page belonging to the same bitstream\n let currentPos = lastPacket.endPage.headerStartPos + lastPacket.endPage.totalSize;\n while (true) {\n let slice = 
this.reader.requestSliceRange(currentPos, MIN_PAGE_HEADER_SIZE, MAX_PAGE_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null;\n }\n const nextPage = readPageHeader(slice);\n if (!nextPage) {\n return null;\n }\n if (nextPage.serialNumber === lastPacket.endPage.serialNumber) {\n return { startPage: nextPage, startSegmentIndex: 0 };\n }\n currentPos = nextPage.headerStartPos + nextPage.totalSize;\n }\n }\n async getMimeType() {\n await this.readMetadata();\n const codecStrings = await Promise.all(this.tracks.map(x => x.getCodecParameterString()));\n return buildOggMimeType({\n codecStrings: codecStrings.filter(Boolean),\n });\n }\n async getTracks() {\n await this.readMetadata();\n return this.tracks;\n }\n async computeDuration() {\n const tracks = await this.getTracks();\n const trackDurations = await Promise.all(tracks.map(x => x.computeDuration()));\n return Math.max(0, ...trackDurations);\n }\n async getMetadataTags() {\n await this.readMetadata();\n return this.metadataTags;\n }\n}\nclass OggAudioTrackBacking {\n constructor(bitstream, demuxer) {\n this.bitstream = bitstream;\n this.demuxer = demuxer;\n this.encodedPacketToMetadata = new WeakMap();\n this.sequentialScanCache = [];\n this.sequentialScanMutex = new AsyncMutex();\n // Opus always uses a fixed sample rate for its internal calculations, even if the actual rate is different\n this.internalSampleRate = bitstream.codecInfo.codec === 'opus'\n ? 
OPUS_SAMPLE_RATE\n : bitstream.sampleRate;\n }\n getId() {\n return this.bitstream.serialNumber;\n }\n getNumber() {\n // All Ogg tracks are audio, so the track's index + 1 is its number\n const index = this.demuxer.tracks.findIndex(t => t._backing.bitstream === this.bitstream);\n assert(index !== -1);\n return index + 1;\n }\n getNumberOfChannels() {\n return this.bitstream.numberOfChannels;\n }\n getSampleRate() {\n return this.bitstream.sampleRate;\n }\n getTimeResolution() {\n return this.bitstream.sampleRate;\n }\n getCodec() {\n return this.bitstream.codecInfo.codec;\n }\n getInternalCodecId() {\n return null;\n }\n async getDecoderConfig() {\n assert(this.bitstream.codecInfo.codec);\n return {\n codec: this.bitstream.codecInfo.codec,\n numberOfChannels: this.bitstream.numberOfChannels,\n sampleRate: this.bitstream.sampleRate,\n description: this.bitstream.description ?? undefined,\n };\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getDisposition() {\n return {\n ...DEFAULT_TRACK_DISPOSITION,\n };\n }\n async getFirstTimestamp() {\n return 0;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n granulePositionToTimestampInSamples(granulePosition) {\n if (this.bitstream.codecInfo.codec === 'opus') {\n assert(this.bitstream.codecInfo.opusInfo);\n return granulePosition - this.bitstream.codecInfo.opusInfo.preSkip;\n }\n return granulePosition;\n }\n createEncodedPacketFromOggPacket(packet, additional, options) {\n if (!packet) {\n return null;\n }\n const { durationInSamples, vorbisBlockSize } = extractSampleMetadata(packet.data, this.bitstream.codecInfo, additional.vorbisLastBlocksize);\n const encodedPacket = new EncodedPacket(options.metadataOnly ? 
PLACEHOLDER_DATA : packet.data, 'key', Math.max(0, additional.timestampInSamples) / this.internalSampleRate, durationInSamples / this.internalSampleRate, packet.endPage.headerStartPos + packet.endSegmentIndex, packet.data.byteLength);\n this.encodedPacketToMetadata.set(encodedPacket, {\n packet,\n timestampInSamples: additional.timestampInSamples,\n durationInSamples,\n vorbisLastBlockSize: additional.vorbisLastBlocksize,\n vorbisBlockSize,\n });\n return encodedPacket;\n }\n async getFirstPacket(options) {\n assert(this.bitstream.lastMetadataPacket);\n const packetPosition = await this.demuxer.findNextPacketStart(this.bitstream.lastMetadataPacket);\n if (!packetPosition) {\n return null;\n }\n let timestampInSamples = 0;\n if (this.bitstream.codecInfo.codec === 'opus') {\n assert(this.bitstream.codecInfo.opusInfo);\n timestampInSamples -= this.bitstream.codecInfo.opusInfo.preSkip;\n }\n const packet = await this.demuxer.readPacket(packetPosition.startPage, packetPosition.startSegmentIndex);\n return this.createEncodedPacketFromOggPacket(packet, {\n timestampInSamples,\n vorbisLastBlocksize: null,\n }, options);\n }\n async getNextPacket(prevPacket, options) {\n const prevMetadata = this.encodedPacketToMetadata.get(prevPacket);\n if (!prevMetadata) {\n throw new Error('Packet was not created from this track.');\n }\n const packetPosition = await this.demuxer.findNextPacketStart(prevMetadata.packet);\n if (!packetPosition) {\n return null;\n }\n const timestampInSamples = prevMetadata.timestampInSamples + prevMetadata.durationInSamples;\n const packet = await this.demuxer.readPacket(packetPosition.startPage, packetPosition.startSegmentIndex);\n return this.createEncodedPacketFromOggPacket(packet, {\n timestampInSamples,\n vorbisLastBlocksize: prevMetadata.vorbisBlockSize,\n }, options);\n }\n async getPacket(timestamp, options) {\n if (this.demuxer.reader.fileSize === null) {\n // No file size known, can't do binary search, but fall back to sequential algo instead\n 
return this.getPacketSequential(timestamp, options);\n }\n const timestampInSamples = roundIfAlmostInteger(timestamp * this.internalSampleRate);\n if (timestampInSamples === 0) {\n // Fast path for timestamp 0 - avoids binary search when playing back from the start\n return this.getFirstPacket(options);\n }\n if (timestampInSamples < 0) {\n // There's nothing here\n return null;\n }\n assert(this.bitstream.lastMetadataPacket);\n const startPosition = await this.demuxer.findNextPacketStart(this.bitstream.lastMetadataPacket);\n if (!startPosition) {\n return null;\n }\n let lowPage = startPosition.startPage;\n let high = this.demuxer.reader.fileSize;\n const lowPages = [lowPage];\n // First, let's perform a binary serach (bisection search) on the file to find the approximate page where\n // we'll find the packet. We want to find a page whose end packet position is less than or equal to the\n // packet position we're searching for.\n // Outer loop: Does the binary serach\n outer: while (lowPage.headerStartPos + lowPage.totalSize < high) {\n const low = lowPage.headerStartPos;\n const mid = Math.floor((low + high) / 2);\n let searchStartPos = mid;\n // Inner loop: Does a linear forward scan if the page cannot be found immediately\n while (true) {\n const until = Math.min(searchStartPos + MAX_PAGE_SIZE, high - MIN_PAGE_HEADER_SIZE);\n let searchSlice = this.demuxer.reader.requestSlice(searchStartPos, until - searchStartPos);\n if (searchSlice instanceof Promise)\n searchSlice = await searchSlice;\n assert(searchSlice);\n const found = findNextPageHeader(searchSlice, until);\n if (!found) {\n high = mid + MIN_PAGE_HEADER_SIZE;\n continue outer;\n }\n let headerSlice = this.demuxer.reader.requestSliceRange(searchSlice.filePos, MIN_PAGE_HEADER_SIZE, MAX_PAGE_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n assert(headerSlice);\n const page = readPageHeader(headerSlice);\n assert(page);\n let pageValid = false;\n if (page.serialNumber 
=== this.bitstream.serialNumber) {\n // Serial numbers are basically random numbers, and the chance of finding a fake page with\n // matching serial number is astronomically low, so we can be pretty sure this page is legit.\n pageValid = true;\n }\n else {\n let pageSlice = this.demuxer.reader.requestSlice(page.headerStartPos, page.totalSize);\n if (pageSlice instanceof Promise)\n pageSlice = await pageSlice;\n assert(pageSlice);\n // Validate the page by checking checksum\n const bytes = readBytes(pageSlice, page.totalSize);\n const crc = computeOggPageCrc(bytes);\n pageValid = crc === page.checksum;\n }\n if (!pageValid) {\n // Keep searching for a valid page\n searchStartPos = page.headerStartPos + 4; // 'OggS' is 4 bytes\n continue;\n }\n if (pageValid && page.serialNumber !== this.bitstream.serialNumber) {\n // Page is valid but from a different bitstream, so keep searching forward until we find one\n // belonging to the our bitstream\n searchStartPos = page.headerStartPos + page.totalSize;\n continue;\n }\n const isContinuationPage = page.granulePosition === -1;\n if (isContinuationPage) {\n // No packet ends on this page - keep looking\n searchStartPos = page.headerStartPos + page.totalSize;\n continue;\n }\n // The page is valid and belongs to our bitstream; let's check its granule position to see where we\n // need to take the bisection search.\n if (this.granulePositionToTimestampInSamples(page.granulePosition) > timestampInSamples) {\n high = page.headerStartPos;\n }\n else {\n lowPage = page;\n lowPages.push(page);\n }\n continue outer;\n }\n }\n // Now we have the last page with a packet position <= the packet position we're looking for, but there\n // might be multiple pages with the packet position, in which case we actually need to find the first of\n // such pages. 
We'll do this in two steps: First, let's find the latest page we know with an earlier packet\n // position, and then linear scan ourselves forward until we find the correct page.\n let lowerPage = startPosition.startPage;\n for (const otherLowPage of lowPages) {\n if (otherLowPage.granulePosition === lowPage.granulePosition) {\n break;\n }\n if (!lowerPage || otherLowPage.headerStartPos > lowerPage.headerStartPos) {\n lowerPage = otherLowPage;\n }\n }\n let currentPage = lowerPage;\n // Keep track of the pages we traversed, we need these later for backwards seeking\n const previousPages = [currentPage];\n while (true) {\n // This loop must terminate as we'll eventually reach lowPage\n if (currentPage.serialNumber === this.bitstream.serialNumber\n && currentPage.granulePosition === lowPage.granulePosition) {\n break;\n }\n const nextPos = currentPage.headerStartPos + currentPage.totalSize;\n let slice = this.demuxer.reader.requestSliceRange(nextPos, MIN_PAGE_HEADER_SIZE, MAX_PAGE_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n const nextPage = readPageHeader(slice);\n assert(nextPage);\n currentPage = nextPage;\n if (currentPage.serialNumber === this.bitstream.serialNumber) {\n previousPages.push(currentPage);\n }\n }\n assert(currentPage.granulePosition !== -1);\n let currentSegmentIndex = null;\n let currentTimestampInSamples;\n let currentTimestampIsCorrect;\n // These indicate the end position of the packet that the granule position belongs to\n let endPage = currentPage;\n let endSegmentIndex = 0;\n if (currentPage.headerStartPos === startPosition.startPage.headerStartPos) {\n currentTimestampInSamples = this.granulePositionToTimestampInSamples(0);\n currentTimestampIsCorrect = true;\n currentSegmentIndex = 0;\n }\n else {\n currentTimestampInSamples = 0; // Placeholder value! 
We'll refine it once we can\n currentTimestampIsCorrect = false;\n // Find the segment index of the next packet\n for (let i = currentPage.lacingValues.length - 1; i >= 0; i--) {\n const value = currentPage.lacingValues[i];\n if (value < 255) {\n // We know the last packet ended at i, so the next one starts at i + 1\n currentSegmentIndex = i + 1;\n break;\n }\n }\n // This must hold: Since this page has a granule position set, that means there must be a packet that\n // ends in this page.\n if (currentSegmentIndex === null) {\n throw new Error('Invalid page with granule position: no packets end on this page.');\n }\n endSegmentIndex = currentSegmentIndex - 1;\n const pseudopacket = {\n data: PLACEHOLDER_DATA,\n endPage,\n endSegmentIndex,\n };\n const nextPosition = await this.demuxer.findNextPacketStart(pseudopacket);\n if (nextPosition) {\n // Let's rewind a single step (packet) - this previous packet ensures that we'll correctly compute\n // the duration for the packet we're looking for.\n const endPosition = findPreviousPacketEndPosition(previousPages, currentPage, currentSegmentIndex);\n assert(endPosition);\n const startPosition = findPacketStartPosition(previousPages, endPosition.page, endPosition.segmentIndex);\n if (startPosition) {\n currentPage = startPosition.page;\n currentSegmentIndex = startPosition.segmentIndex;\n }\n }\n else {\n // There is no next position, which means we're looking for the last packet in the bitstream. The\n // granule position on the last page tends to be fucky, so let's instead start the search on the\n // page before that. 
So let's loop until we find a packet that ends in a previous page.\n while (true) {\n const endPosition = findPreviousPacketEndPosition(previousPages, currentPage, currentSegmentIndex);\n if (!endPosition) {\n break;\n }\n const startPosition = findPacketStartPosition(previousPages, endPosition.page, endPosition.segmentIndex);\n if (!startPosition) {\n break;\n }\n currentPage = startPosition.page;\n currentSegmentIndex = startPosition.segmentIndex;\n if (endPosition.page.headerStartPos !== endPage.headerStartPos) {\n endPage = endPosition.page;\n endSegmentIndex = endPosition.segmentIndex;\n break;\n }\n }\n }\n }\n let lastEncodedPacket = null;\n let lastEncodedPacketMetadata = null;\n // Alright, now it's time for the final, granular seek: We keep iterating over packets until we've found the\n // one with the correct timestamp - i.e., the last one with a timestamp <= the timestamp we're looking for.\n while (currentPage !== null) {\n assert(currentSegmentIndex !== null);\n const packet = await this.demuxer.readPacket(currentPage, currentSegmentIndex);\n if (!packet) {\n break;\n }\n // We might need to skip the packet if it's a metadata one\n const skipPacket = currentPage.headerStartPos === startPosition.startPage.headerStartPos\n && currentSegmentIndex < startPosition.startSegmentIndex;\n if (!skipPacket) {\n let encodedPacket = this.createEncodedPacketFromOggPacket(packet, {\n timestampInSamples: currentTimestampInSamples,\n vorbisLastBlocksize: lastEncodedPacketMetadata?.vorbisBlockSize ?? 
null,\n }, options);\n assert(encodedPacket);\n let encodedPacketMetadata = this.encodedPacketToMetadata.get(encodedPacket);\n assert(encodedPacketMetadata);\n if (!currentTimestampIsCorrect\n && packet.endPage.headerStartPos === endPage.headerStartPos\n && packet.endSegmentIndex === endSegmentIndex) {\n // We know this packet end timestamp can be derived from the page's granule position\n currentTimestampInSamples = this.granulePositionToTimestampInSamples(currentPage.granulePosition);\n currentTimestampIsCorrect = true;\n // Let's backpatch the packet we just created with the correct timestamp\n encodedPacket = this.createEncodedPacketFromOggPacket(packet, {\n timestampInSamples: currentTimestampInSamples - encodedPacketMetadata.durationInSamples,\n vorbisLastBlocksize: lastEncodedPacketMetadata?.vorbisBlockSize ?? null,\n }, options);\n assert(encodedPacket);\n encodedPacketMetadata = this.encodedPacketToMetadata.get(encodedPacket);\n assert(encodedPacketMetadata);\n }\n else {\n currentTimestampInSamples += encodedPacketMetadata.durationInSamples;\n }\n lastEncodedPacket = encodedPacket;\n lastEncodedPacketMetadata = encodedPacketMetadata;\n if (currentTimestampIsCorrect\n && (\n // Next timestamp will be too late\n Math.max(currentTimestampInSamples, 0) > timestampInSamples\n // This timestamp already matches\n || Math.max(encodedPacketMetadata.timestampInSamples, 0) === timestampInSamples)) {\n break;\n }\n }\n const nextPosition = await this.demuxer.findNextPacketStart(packet);\n if (!nextPosition) {\n break;\n }\n currentPage = nextPosition.startPage;\n currentSegmentIndex = nextPosition.startSegmentIndex;\n }\n return lastEncodedPacket;\n }\n // A slower but simpler and sequential algorithm for finding a packet in a file\n async getPacketSequential(timestamp, options) {\n const release = await this.sequentialScanMutex.acquire(); // Requires exclusivity because we write to a cache\n try {\n const timestampInSamples = roundIfAlmostInteger(timestamp * 
this.internalSampleRate);\n timestamp = timestampInSamples / this.internalSampleRate;\n const index = binarySearchLessOrEqual(this.sequentialScanCache, timestampInSamples, x => x.timestampInSamples);\n let currentPacket;\n if (index !== -1) {\n // We don't need to start from the beginning, we can start at a previous scan point\n const cacheEntry = this.sequentialScanCache[index];\n currentPacket = this.createEncodedPacketFromOggPacket(cacheEntry.packet, {\n timestampInSamples: cacheEntry.timestampInSamples,\n vorbisLastBlocksize: cacheEntry.vorbisLastBlockSize,\n }, options);\n }\n else {\n currentPacket = await this.getFirstPacket(options);\n }\n let i = 0;\n while (currentPacket && currentPacket.timestamp < timestamp) {\n const nextPacket = await this.getNextPacket(currentPacket, options);\n if (!nextPacket || nextPacket.timestamp > timestamp) {\n break;\n }\n currentPacket = nextPacket;\n i++;\n if (i === 100) {\n // Add \"checkpoints\" every once in a while to speed up subsequent random accesses\n i = 0;\n const metadata = this.encodedPacketToMetadata.get(currentPacket);\n assert(metadata);\n if (this.sequentialScanCache.length > 0) {\n // If we reach this case, we must be at the end of the cache\n assert(last(this.sequentialScanCache).timestampInSamples <= metadata.timestampInSamples);\n }\n this.sequentialScanCache.push(metadata);\n }\n }\n return currentPacket;\n }\n finally {\n release();\n }\n }\n getKeyPacket(timestamp, options) {\n return this.getPacket(timestamp, options);\n }\n getNextKeyPacket(packet, options) {\n return this.getNextPacket(packet, options);\n }\n}\n/** Finds the start position of a packet given its end position. 
*/\nconst findPacketStartPosition = (pageList, endPage, endSegmentIndex) => {\n let page = endPage;\n let segmentIndex = endSegmentIndex;\n outer: while (true) {\n segmentIndex--;\n for (segmentIndex; segmentIndex >= 0; segmentIndex--) {\n const lacingValue = page.lacingValues[segmentIndex];\n if (lacingValue < 255) {\n segmentIndex++; // We know the last packet starts here\n break outer;\n }\n }\n assert(segmentIndex === -1);\n const pageStartsWithFreshPacket = !(page.headerType & 0x01);\n if (pageStartsWithFreshPacket) {\n // Fast exit: We know we don't need to look in the previous page\n segmentIndex = 0;\n break;\n }\n const previousPage = findLast(pageList, x => x.headerStartPos < page.headerStartPos);\n if (!previousPage) {\n return null;\n }\n page = previousPage;\n segmentIndex = page.lacingValues.length;\n }\n assert(segmentIndex !== -1);\n if (segmentIndex === page.lacingValues.length) {\n // Wrap back around to the first segment of the next page\n const nextPage = pageList[pageList.indexOf(page) + 1];\n assert(nextPage);\n page = nextPage;\n segmentIndex = 0;\n }\n return { page, segmentIndex };\n};\n/** Finds the end position of a packet given the start position of the following packet. */\nconst findPreviousPacketEndPosition = (pageList, startPage, startSegmentIndex) => {\n if (startSegmentIndex > 0) {\n // Easy\n return { page: startPage, segmentIndex: startSegmentIndex - 1 };\n }\n const previousPage = findLast(pageList, x => x.headerStartPos < startPage.headerStartPos);\n if (!previousPage) {\n return null;\n }\n return { page: previousPage, segmentIndex: previousPage.lacingValues.length - 1 };\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { aacChannelMap, aacFrequencyTable } from '../../shared/aac-misc.js';\nimport { Demuxer } from '../demuxer.js';\nimport { ID3_V2_HEADER_SIZE, parseId3V2Tag, readId3V2Header, } from '../id3.js';\nimport { InputAudioTrack } from '../input-track.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, AsyncMutex, binarySearchExact, binarySearchLessOrEqual, UNDETERMINED_LANGUAGE, } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { readBytes } from '../reader.js';\nimport { MIN_ADTS_FRAME_HEADER_SIZE, MAX_ADTS_FRAME_HEADER_SIZE, readAdtsFrameHeader, } from './adts-reader.js';\nexport const SAMPLES_PER_AAC_FRAME = 1024;\nexport class AdtsDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.metadataPromise = null;\n this.firstFrameHeader = null;\n this.loadedSamples = [];\n this.metadataTags = null;\n this.tracks = [];\n this.readingMutex = new AsyncMutex();\n this.lastSampleLoaded = false;\n this.lastLoadedPos = 0;\n this.nextTimestampInSamples = 0;\n this.reader = input._reader;\n }\n async readMetadata() {\n return this.metadataPromise ??= (async () => {\n // Keep loading until we find the first frame header\n while (!this.firstFrameHeader && !this.lastSampleLoaded) {\n await this.advanceReader();\n }\n // There has to be a frame if this demuxer got selected\n assert(this.firstFrameHeader);\n // Create the single audio track\n this.tracks = [new InputAudioTrack(this.input, new AdtsAudioTrackBacking(this))];\n })();\n }\n async advanceReader() {\n if (this.lastLoadedPos === 0) {\n // Skip all ID3v2 tags at the start of the file\n while (true) {\n let slice = this.reader.requestSlice(this.lastLoadedPos, ID3_V2_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n this.lastSampleLoaded = true;\n return;\n }\n const id3V2Header 
= readId3V2Header(slice);\n if (!id3V2Header) {\n break;\n }\n this.lastLoadedPos = slice.filePos + id3V2Header.size;\n }\n }\n let slice = this.reader.requestSliceRange(this.lastLoadedPos, MIN_ADTS_FRAME_HEADER_SIZE, MAX_ADTS_FRAME_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n this.lastSampleLoaded = true;\n return;\n }\n const header = readAdtsFrameHeader(slice);\n if (!header) {\n this.lastSampleLoaded = true;\n return;\n }\n if (this.reader.fileSize !== null && header.startPos + header.frameLength > this.reader.fileSize) {\n // Frame doesn't fit in the rest of the file\n this.lastSampleLoaded = true;\n return;\n }\n if (!this.firstFrameHeader) {\n this.firstFrameHeader = header;\n }\n const sampleRate = aacFrequencyTable[header.samplingFrequencyIndex];\n assert(sampleRate !== undefined);\n const sampleDuration = SAMPLES_PER_AAC_FRAME / sampleRate;\n const sample = {\n timestamp: this.nextTimestampInSamples / sampleRate,\n duration: sampleDuration,\n dataStart: header.startPos,\n dataSize: header.frameLength,\n };\n this.loadedSamples.push(sample);\n this.nextTimestampInSamples += SAMPLES_PER_AAC_FRAME;\n this.lastLoadedPos = header.startPos + header.frameLength;\n }\n async getMimeType() {\n return 'audio/aac';\n }\n async getTracks() {\n await this.readMetadata();\n return this.tracks;\n }\n async computeDuration() {\n await this.readMetadata();\n const track = this.tracks[0];\n assert(track);\n return track.computeDuration();\n }\n async getMetadataTags() {\n const release = await this.readingMutex.acquire();\n try {\n await this.readMetadata();\n if (this.metadataTags) {\n return this.metadataTags;\n }\n this.metadataTags = {};\n let currentPos = 0;\n while (true) {\n let headerSlice = this.reader.requestSlice(currentPos, ID3_V2_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n if (!headerSlice)\n break;\n const id3V2Header = readId3V2Header(headerSlice);\n if (!id3V2Header) {\n 
break;\n }\n let contentSlice = this.reader.requestSlice(headerSlice.filePos, id3V2Header.size);\n if (contentSlice instanceof Promise)\n contentSlice = await contentSlice;\n if (!contentSlice)\n break;\n parseId3V2Tag(contentSlice, id3V2Header, this.metadataTags);\n currentPos = headerSlice.filePos + id3V2Header.size;\n }\n return this.metadataTags;\n }\n finally {\n release();\n }\n }\n}\nclass AdtsAudioTrackBacking {\n constructor(demuxer) {\n this.demuxer = demuxer;\n }\n getId() {\n return 1;\n }\n getNumber() {\n return 1;\n }\n async getFirstTimestamp() {\n return 0;\n }\n getTimeResolution() {\n const sampleRate = this.getSampleRate();\n return sampleRate / SAMPLES_PER_AAC_FRAME;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getCodec() {\n return 'aac';\n }\n getInternalCodecId() {\n assert(this.demuxer.firstFrameHeader);\n return this.demuxer.firstFrameHeader.objectType;\n }\n getNumberOfChannels() {\n assert(this.demuxer.firstFrameHeader);\n const numberOfChannels = aacChannelMap[this.demuxer.firstFrameHeader.channelConfiguration];\n assert(numberOfChannels !== undefined);\n return numberOfChannels;\n }\n getSampleRate() {\n assert(this.demuxer.firstFrameHeader);\n const sampleRate = aacFrequencyTable[this.demuxer.firstFrameHeader.samplingFrequencyIndex];\n assert(sampleRate !== undefined);\n return sampleRate;\n }\n getDisposition() {\n return {\n ...DEFAULT_TRACK_DISPOSITION,\n };\n }\n async getDecoderConfig() {\n assert(this.demuxer.firstFrameHeader);\n return {\n codec: `mp4a.40.${this.demuxer.firstFrameHeader.objectType}`,\n numberOfChannels: this.getNumberOfChannels(),\n sampleRate: this.getSampleRate(),\n };\n }\n async getPacketAtIndex(sampleIndex, options) {\n if (sampleIndex === -1) {\n return null;\n }\n const 
rawSample = this.demuxer.loadedSamples[sampleIndex];\n if (!rawSample) {\n return null;\n }\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.demuxer.reader.requestSlice(rawSample.dataStart, rawSample.dataSize);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null; // Data didn't fit into the rest of the file\n }\n data = readBytes(slice, rawSample.dataSize);\n }\n return new EncodedPacket(data, 'key', rawSample.timestamp, rawSample.duration, sampleIndex, rawSample.dataSize);\n }\n getFirstPacket(options) {\n return this.getPacketAtIndex(0, options);\n }\n async getNextPacket(packet, options) {\n const release = await this.demuxer.readingMutex.acquire();\n try {\n const sampleIndex = binarySearchExact(this.demuxer.loadedSamples, packet.timestamp, x => x.timestamp);\n if (sampleIndex === -1) {\n throw new Error('Packet was not created from this track.');\n }\n const nextIndex = sampleIndex + 1;\n // Ensure the next sample exists\n while (nextIndex >= this.demuxer.loadedSamples.length\n && !this.demuxer.lastSampleLoaded) {\n await this.demuxer.advanceReader();\n }\n return this.getPacketAtIndex(nextIndex, options);\n }\n finally {\n release();\n }\n }\n async getPacket(timestamp, options) {\n const release = await this.demuxer.readingMutex.acquire();\n try {\n while (true) {\n const index = binarySearchLessOrEqual(this.demuxer.loadedSamples, timestamp, x => x.timestamp);\n if (index === -1 && this.demuxer.loadedSamples.length > 0) {\n // We're before the first sample\n return null;\n }\n if (this.demuxer.lastSampleLoaded) {\n // All data is loaded, return what we found\n return this.getPacketAtIndex(index, options);\n }\n if (index >= 0 && index + 1 < this.demuxer.loadedSamples.length) {\n // The next packet also exists, we're done\n return this.getPacketAtIndex(index, options);\n }\n // Otherwise, keep loading data\n await this.demuxer.advanceReader();\n }\n }\n finally {\n 
release();\n }\n }\n getKeyPacket(timestamp, options) {\n return this.getPacket(timestamp, options);\n }\n getNextKeyPacket(packet, options) {\n return this.getNextPacket(packet, options);\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { FlacBlockType, readVorbisComments } from '../codec-data.js';\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack } from '../input-track.js';\nimport { assert, AsyncMutex, binarySearchLessOrEqual, textDecoder, UNDETERMINED_LANGUAGE, } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { readBytes, readU24Be, readU32Be, readU8, } from '../reader.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { calculateCrc8, readBlockSize, getBlockSizeOrUncommon, readCodedNumber, readSampleRate, getSampleRateOrUncommon, } from './flac-misc.js';\nimport { Bitstream } from '../../shared/bitstream.js';\nexport class FlacDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.loadedSamples = []; // All samples from the start of the file to lastLoadedPos\n this.metadataPromise = null;\n this.track = null;\n this.metadataTags = {};\n this.audioInfo = null;\n this.lastLoadedPos = null;\n this.blockingBit = null;\n this.readingMutex = new AsyncMutex();\n this.lastSampleLoaded = false;\n this.reader = input._reader;\n }\n async computeDuration() {\n await this.readMetadata();\n assert(this.track);\n return this.track.computeDuration();\n }\n async getMetadataTags() {\n await this.readMetadata();\n return this.metadataTags;\n }\n async getTracks() {\n await this.readMetadata();\n assert(this.track);\n return [this.track];\n }\n async getMimeType() {\n return 'audio/flac';\n }\n async readMetadata() {\n let currentPos 
= 4; // Skip 'fLaC'\n return (this.metadataPromise ??= (async () => {\n while (this.reader.fileSize === null\n || currentPos < this.reader.fileSize) {\n let sizeSlice = this.reader.requestSlice(currentPos, 4);\n if (sizeSlice instanceof Promise)\n sizeSlice = await sizeSlice;\n currentPos += 4;\n if (sizeSlice === null) {\n throw new Error(`Metadata block at position ${currentPos} is too small! Corrupted file.`);\n }\n assert(sizeSlice);\n const byte = readU8(sizeSlice); // first bit: isLastMetadata, remaining 7 bits: metaBlockType\n const size = readU24Be(sizeSlice);\n const isLastMetadata = (byte & 0x80) !== 0;\n const metaBlockType = byte & 0x7f;\n switch (metaBlockType) {\n case FlacBlockType.STREAMINFO: {\n // Parse streaminfo block\n // https://www.rfc-editor.org/rfc/rfc9639.html#section-8.2\n let streamInfoBlock = this.reader.requestSlice(currentPos, size);\n if (streamInfoBlock instanceof Promise)\n streamInfoBlock = await streamInfoBlock;\n assert(streamInfoBlock);\n if (streamInfoBlock === null) {\n throw new Error(`StreamInfo block at position ${currentPos} is too small! Corrupted file.`);\n }\n const streamInfoBytes = readBytes(streamInfoBlock, 34);\n const bitstream = new Bitstream(streamInfoBytes);\n const minimumBlockSize = bitstream.readBits(16);\n const maximumBlockSize = bitstream.readBits(16);\n const minimumFrameSize = bitstream.readBits(24);\n const maximumFrameSize = bitstream.readBits(24);\n const sampleRate = bitstream.readBits(20);\n const numberOfChannels = bitstream.readBits(3) + 1;\n bitstream.readBits(5); // bitsPerSample - 1\n const totalSamples = bitstream.readBits(36);\n // https://www.w3.org/TR/webcodecs-flac-codec-registration/#audiodecoderconfig-description\n // description is required, and has to be the following:\n // 1. The bytes 0x66 0x4C 0x61 0x43 (\"fLaC\" in ASCII)\n // 2. A metadata block (called the STREAMINFO block) as described in section 7 of [FLAC]\n // 3. 
Optionaly (sic) other metadata blocks, that are not used by the specification\n bitstream.skipBits(16 * 8); // md5 hash\n const description = new Uint8Array(42);\n // 1. \"fLaC\"\n description.set(new Uint8Array([0x66, 0x4c, 0x61, 0x43]), 0);\n // 2. STREAMINFO block\n description.set(new Uint8Array([128, 0, 0, 34]), 4);\n // 3. Other metadata blocks\n description.set(streamInfoBytes, 8);\n this.audioInfo = {\n numberOfChannels,\n sampleRate,\n totalSamples,\n minimumBlockSize,\n maximumBlockSize,\n minimumFrameSize,\n maximumFrameSize,\n description,\n };\n this.track = new InputAudioTrack(this.input, new FlacAudioTrackBacking(this));\n break;\n }\n case FlacBlockType.VORBIS_COMMENT: {\n // Parse vorbis comment block\n // https://www.rfc-editor.org/rfc/rfc9639.html#name-vorbis-comment\n let vorbisCommentBlock = this.reader.requestSlice(currentPos, size);\n if (vorbisCommentBlock instanceof Promise)\n vorbisCommentBlock = await vorbisCommentBlock;\n assert(vorbisCommentBlock);\n readVorbisComments(readBytes(vorbisCommentBlock, size), this.metadataTags);\n break;\n }\n case FlacBlockType.PICTURE: {\n // Parse picture block\n // https://www.rfc-editor.org/rfc/rfc9639.html#name-picture\n let pictureBlock = this.reader.requestSlice(currentPos, size);\n if (pictureBlock instanceof Promise)\n pictureBlock = await pictureBlock;\n assert(pictureBlock);\n const pictureType = readU32Be(pictureBlock);\n const mediaTypeLength = readU32Be(pictureBlock);\n const mediaType = textDecoder.decode(readBytes(pictureBlock, mediaTypeLength));\n const descriptionLength = readU32Be(pictureBlock);\n const description = textDecoder.decode(readBytes(pictureBlock, descriptionLength));\n pictureBlock.skip(4 + 4 + 4 + 4); // Skip width, height, color depth, number of indexed colors\n const dataLength = readU32Be(pictureBlock);\n const data = readBytes(pictureBlock, dataLength);\n this.metadataTags.images ??= [];\n this.metadataTags.images.push({\n data,\n mimeType: mediaType,\n // 
https://www.rfc-editor.org/rfc/rfc9639.html#table13\n kind: pictureType === 3\n ? 'coverFront'\n : pictureType === 4\n ? 'coverBack'\n : 'unknown',\n description,\n });\n break;\n }\n default:\n break;\n }\n currentPos += size;\n if (isLastMetadata) {\n this.lastLoadedPos = currentPos;\n break;\n }\n }\n })());\n }\n async readNextFlacFrame({ startPos, isFirstPacket, }) {\n assert(this.audioInfo);\n // we expect that there are at least `minimumFrameSize` bytes left in the file\n // Ideally we also want to validate the next header is valid\n // to throw out an accidential sync word\n // The shortest valid FLAC header I can think of, based off the code\n // of readFlacFrameHeader:\n // 4 bytes used for bitstream from syncword to bit depth\n // 1 byte coded number\n // (uncommon values, no bytes read)\n // 1 byte crc\n // --> 6 bytes\n const minimumHeaderLength = 6;\n // If we read everything in readFlacFrameHeader, we read 16 bytes\n const maximumHeaderSize = 16;\n const maximumSliceLength = this.audioInfo.maximumFrameSize + maximumHeaderSize;\n const slice = await this.reader.requestSliceRange(startPos, this.audioInfo.minimumFrameSize, maximumSliceLength);\n if (!slice) {\n return null;\n }\n const frameHeader = this.readFlacFrameHeader({\n slice,\n isFirstPacket: isFirstPacket,\n });\n if (!frameHeader) {\n return null;\n }\n // We don't know exactly how long the packet is, we only know the `minimumFrameSize` and `maximumFrameSize`\n // The packet is over if the next 2 bytes are the sync word followed by a valid header\n // or the end of the file is reached\n // The next sync word is expected at earliest when `minimumFrameSize` is reached,\n // we can skip over anything before that\n slice.filePos = startPos + this.audioInfo.minimumFrameSize;\n while (true) {\n // Reached end of the file, packet is over\n if (slice.filePos > slice.end - minimumHeaderLength) {\n return {\n num: frameHeader.num,\n blockSize: frameHeader.blockSize,\n sampleRate: 
frameHeader.sampleRate,\n size: slice.end - startPos,\n isLastFrame: true,\n };\n }\n const nextByte = readU8(slice);\n if (nextByte === 0xff) {\n const positionBeforeReading = slice.filePos;\n const byteAfterNextByte = readU8(slice);\n const expected = this.blockingBit === 1 ? 0b1111_1001 : 0b1111_1000;\n if (byteAfterNextByte !== expected) {\n slice.filePos = positionBeforeReading;\n continue;\n }\n slice.skip(-2);\n const lengthIfNextFlacFrameHeaderIsLegit = slice.filePos - startPos;\n const nextFrameHeader = this.readFlacFrameHeader({\n slice,\n isFirstPacket: false,\n });\n if (!nextFrameHeader) {\n slice.filePos = positionBeforeReading;\n continue;\n }\n // Ensure the frameOrSampleNum is consecutive.\n // https://github.com/Vanilagy/mediabunny/issues/194\n if (this.blockingBit === 0) {\n // Case A: If the stream is fixed block size, this is the frame number, which increments by 1\n if (nextFrameHeader.num - frameHeader.num !== 1) {\n slice.filePos = positionBeforeReading;\n continue;\n }\n }\n else {\n // Case B: If the stream is variable block size, this is the sample number, which increments by\n // amount of samples in a frame.\n if (nextFrameHeader.num - frameHeader.num !== frameHeader.blockSize) {\n slice.filePos = positionBeforeReading;\n continue;\n }\n }\n return {\n num: frameHeader.num,\n blockSize: frameHeader.blockSize,\n sampleRate: frameHeader.sampleRate,\n size: lengthIfNextFlacFrameHeaderIsLegit,\n isLastFrame: false,\n };\n }\n }\n }\n readFlacFrameHeader({ slice, isFirstPacket, }) {\n // In this function, generally it is not safe to throw errors.\n // We might end up here because we stumbled upon a syncword,\n // but the data might not actually be a FLAC frame, it might be random bitstream\n // data, in that case we should return null and continue.\n const startOffset = slice.filePos;\n // https://www.rfc-editor.org/rfc/rfc9639.html#section-9.1\n // Each frame MUST start on a byte boundary and start with the 15-bit frame\n // sync code 
0b111111111111100. Following the sync code is the blocking strategy\n // bit, which MUST NOT change during the audio stream.\n const bytes = readBytes(slice, 4);\n const bitstream = new Bitstream(bytes);\n const bits = bitstream.readBits(15);\n if (bits !== 0b111111111111100) {\n // This cannot be a valid FLAC frame, must start with the syncword\n return null;\n }\n if (this.blockingBit === null) {\n assert(isFirstPacket);\n const newBlockingBit = bitstream.readBits(1);\n this.blockingBit = newBlockingBit;\n }\n else if (this.blockingBit === 1) {\n assert(!isFirstPacket);\n const newBlockingBit = bitstream.readBits(1);\n if (newBlockingBit !== 1) {\n // This cannot be a valid FLAC frame, expected 1 but got 0\n return null;\n }\n }\n else if (this.blockingBit === 0) {\n assert(!isFirstPacket);\n const newBlockingBit = bitstream.readBits(1);\n if (newBlockingBit !== 0) {\n // This cannot be a valid FLAC frame, expected 0 but got 1\n return null;\n }\n }\n else {\n throw new Error('Invalid blocking bit');\n }\n const blockSizeOrUncommon = getBlockSizeOrUncommon(bitstream.readBits(4));\n if (!blockSizeOrUncommon) {\n // This cannot be a valid FLAC frame, the syncword was just coincidental\n return null;\n }\n assert(this.audioInfo);\n const sampleRateOrUncommon = getSampleRateOrUncommon(bitstream.readBits(4), this.audioInfo.sampleRate);\n if (!sampleRateOrUncommon) {\n // This cannot be a valid FLAC frame, the syncword was just coincidental\n return null;\n }\n bitstream.readBits(4); // channel count\n bitstream.readBits(3); // bit depth\n const reservedZero = bitstream.readBits(1); // reserved zero\n if (reservedZero !== 0) {\n // This cannot be a valid FLAC frame, the syncword was just coincidental\n return null;\n }\n const num = readCodedNumber(slice);\n const blockSize = readBlockSize(slice, blockSizeOrUncommon);\n const sampleRate = readSampleRate(slice, sampleRateOrUncommon);\n if (sampleRate === null) {\n // This cannot be a valid FLAC frame, the syncword was 
just coincidental\n return null;\n }\n if (sampleRate !== this.audioInfo.sampleRate) {\n // This cannot be a valid FLAC frame, the sample rate is not the same as in the stream info\n return null;\n }\n const size = slice.filePos - startOffset;\n const crc = readU8(slice);\n slice.skip(-size);\n slice.skip(-1);\n const crcCalculated = calculateCrc8(readBytes(slice, size));\n if (crc !== crcCalculated) {\n // Maybe this wasn't a FLAC frame at all, the syncword was just coincidentally\n // in the bitstream\n return null;\n }\n return { num, blockSize, sampleRate };\n }\n async advanceReader() {\n await this.readMetadata();\n assert(this.lastLoadedPos !== null);\n assert(this.audioInfo);\n const startPos = this.lastLoadedPos;\n const frame = await this.readNextFlacFrame({\n startPos,\n isFirstPacket: this.loadedSamples.length === 0,\n });\n if (!frame) {\n // Unexpected case, failed to read next FLAC frame\n // handling gracefully\n this.lastSampleLoaded = true;\n return;\n }\n const lastSample = this.loadedSamples[this.loadedSamples.length - 1];\n const blockOffset = lastSample\n ? lastSample.blockOffset + lastSample.blockSize\n : 0;\n const sample = {\n blockOffset,\n blockSize: frame.blockSize,\n byteOffset: startPos,\n byteSize: frame.size,\n };\n this.lastLoadedPos = this.lastLoadedPos + frame.size;\n this.loadedSamples.push(sample);\n if (frame.isLastFrame) {\n this.lastSampleLoaded = true;\n return;\n }\n }\n}\nclass FlacAudioTrackBacking {\n constructor(demuxer) {\n this.demuxer = demuxer;\n }\n getId() {\n return 1;\n }\n getNumber() {\n return 1;\n }\n getCodec() {\n return 'flac';\n }\n getInternalCodecId() {\n return null;\n }\n getNumberOfChannels() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.numberOfChannels;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 
0);\n }\n getSampleRate() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.sampleRate;\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getTimeResolution() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.sampleRate;\n }\n getDisposition() {\n return {\n ...DEFAULT_TRACK_DISPOSITION,\n };\n }\n async getFirstTimestamp() {\n return 0;\n }\n async getDecoderConfig() {\n assert(this.demuxer.audioInfo);\n return {\n codec: 'flac',\n numberOfChannels: this.demuxer.audioInfo.numberOfChannels,\n sampleRate: this.demuxer.audioInfo.sampleRate,\n description: this.demuxer.audioInfo.description,\n };\n }\n async getPacket(timestamp, options) {\n assert(this.demuxer.audioInfo);\n if (timestamp < 0) {\n throw new Error('Timestamp cannot be negative');\n }\n const release = await this.demuxer.readingMutex.acquire();\n try {\n while (true) {\n const packetIndex = binarySearchLessOrEqual(this.demuxer.loadedSamples, timestamp, x => x.blockOffset / this.demuxer.audioInfo.sampleRate);\n if (packetIndex === -1) {\n await this.demuxer.advanceReader();\n continue;\n }\n const packet = this.demuxer.loadedSamples[packetIndex];\n const sampleTimestamp = packet.blockOffset / this.demuxer.audioInfo.sampleRate;\n const sampleDuration = packet.blockSize / this.demuxer.audioInfo.sampleRate;\n if (sampleTimestamp + sampleDuration <= timestamp) {\n if (this.demuxer.lastSampleLoaded) {\n return this.getPacketAtIndex(this.demuxer.loadedSamples.length - 1, options);\n }\n await this.demuxer.advanceReader();\n continue;\n }\n return this.getPacketAtIndex(packetIndex, options);\n }\n }\n finally {\n release();\n }\n }\n async getNextPacket(packet, options) {\n const release = await this.demuxer.readingMutex.acquire();\n try {\n const nextIndex = packet.sequenceNumber + 1;\n if (this.demuxer.lastSampleLoaded\n && nextIndex >= this.demuxer.loadedSamples.length) {\n return null;\n }\n // Ensure the next sample exists\n 
while (nextIndex >= this.demuxer.loadedSamples.length\n && !this.demuxer.lastSampleLoaded) {\n await this.demuxer.advanceReader();\n }\n return this.getPacketAtIndex(nextIndex, options);\n }\n finally {\n release();\n }\n }\n getKeyPacket(timestamp, options) {\n return this.getPacket(timestamp, options);\n }\n getNextKeyPacket(packet, options) {\n return this.getNextPacket(packet, options);\n }\n async getPacketAtIndex(sampleIndex, options) {\n const rawSample = this.demuxer.loadedSamples[sampleIndex];\n if (!rawSample) {\n return null;\n }\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.demuxer.reader.requestSlice(rawSample.byteOffset, rawSample.byteSize);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null; // Data didn't fit into the rest of the file\n }\n data = readBytes(slice, rawSample.byteSize);\n }\n assert(this.demuxer.audioInfo);\n const timestamp = rawSample.blockOffset / this.demuxer.audioInfo.sampleRate;\n const duration = rawSample.blockSize / this.demuxer.audioInfo.sampleRate;\n return new EncodedPacket(data, 'key', timestamp, duration, sampleIndex, rawSample.byteSize);\n }\n async getFirstPacket(options) {\n // Ensure the next sample exists\n while (this.demuxer.loadedSamples.length === 0\n && !this.demuxer.lastSampleLoaded) {\n await this.demuxer.advanceReader();\n }\n return this.getPacketAtIndex(0, options);\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { SAMPLES_PER_AAC_FRAME } from '../adts/adts-demuxer.js';\nimport { MAX_ADTS_FRAME_HEADER_SIZE, readAdtsFrameHeader } from '../adts/adts-reader.js';\nimport { aacChannelMap, aacFrequencyTable } from '../../shared/aac-misc.js';\nimport { extractAudioCodecString, extractVideoCodecString, } from '../codec.js';\nimport { AC3_ACMOD_CHANNEL_COUNTS, AC3_SAMPLES_PER_FRAME, AvcNalUnitType, extractAvcDecoderConfigurationRecord, extractHevcDecoderConfigurationRecord, extractNalUnitTypeForAvc, extractNalUnitTypeForHevc, EAC3_NUMBLKS_TABLE, getEac3ChannelCount, getEac3SampleRate, HevcNalUnitType, parseAc3SyncFrame, parseAvcSps, parseEac3SyncFrame, parseHevcSps, AC3_FRAME_SIZES, } from '../codec-data.js';\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack, InputVideoTrack, } from '../input-track.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, binarySearchExact, binarySearchLessOrEqual, COLOR_PRIMARIES_MAP_INVERSE, findLastIndex, floorToMultiple, last, MATRIX_COEFFICIENTS_MAP_INVERSE, roundIfAlmostInteger, toDataView, TRANSFER_CHARACTERISTICS_MAP_INVERSE, UNDETERMINED_LANGUAGE, } from '../misc.js';\nimport { FRAME_HEADER_SIZE as MP3_FRAME_HEADER_SIZE, readMp3FrameHeader } from '../../shared/mp3-misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { FileSlice, readBytes, readU16Be, readU32Be, readU8 } from '../reader.js';\nimport { buildMpegTsMimeType, TIMESCALE, TS_PACKET_SIZE } from './mpeg-ts-misc.js';\nimport { AC3_SAMPLE_RATES } from '../../shared/ac3-misc.js';\nimport { Bitstream } from '../../shared/bitstream.js';\nexport class MpegTsDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.metadataPromise = null;\n this.elementaryStreams = [];\n this.tracks = [];\n this.packetOffset = 0;\n this.packetStride = -1;\n this.sectionEndPositions = [];\n 
this.seekChunkSize = 5 * 1024 * 1024; // 5 MiB, picked because most HLS segments are below this size\n this.minReferencePointByteDistance = -1;\n this.reader = input._reader;\n }\n async readMetadata() {\n return this.metadataPromise ??= (async () => {\n const lengthToCheck = TS_PACKET_SIZE + 16 + 1;\n let startingSlice = this.reader.requestSlice(0, lengthToCheck);\n if (startingSlice instanceof Promise)\n startingSlice = await startingSlice;\n assert(startingSlice);\n const startingBytes = readBytes(startingSlice, lengthToCheck);\n if (startingBytes[0] === 0x47 && startingBytes[TS_PACKET_SIZE] === 0x47) {\n // Regular MPEG-TS\n this.packetOffset = 0;\n this.packetStride = TS_PACKET_SIZE;\n }\n else if (startingBytes[0] === 0x47 && startingBytes[TS_PACKET_SIZE + 16] === 0x47) {\n // MPEG-TS with Forward Error Correction\n this.packetOffset = 0;\n this.packetStride = TS_PACKET_SIZE + 16;\n }\n else if (startingBytes[4] === 0x47 && startingBytes[4 + TS_PACKET_SIZE] === 0x47) {\n // MPEG-2-TS (DVHS)\n this.packetOffset = 4;\n this.packetStride = TS_PACKET_SIZE;\n }\n else {\n throw new Error('Unreachable.');\n }\n const MIN_REFERENCE_POINT_PACKET_DISTANCE = 256;\n this.minReferencePointByteDistance = MIN_REFERENCE_POINT_PACKET_DISTANCE * this.packetStride;\n let currentPos = this.packetOffset;\n let programMapPid = null;\n // Some files contain these multiple times, but we only care about their first appearance\n let hasProgramAssociationTable = false;\n let hasProgramMap = false;\n while (true) {\n const packetHeader = await this.readPacketHeader(currentPos);\n if (!packetHeader) {\n break;\n }\n if (packetHeader.payloadUnitStartIndicator === 0) {\n // Not the start of a section\n currentPos += this.packetStride;\n continue;\n }\n const section = await this.readSection(currentPos, true, !hasProgramMap);\n if (!section) {\n break;\n }\n const BYTES_BEFORE_SECTION_LENGTH = 3;\n const BITS_IN_CRC_32 = 32; // Duh\n // Some streams don't contain a PAT for some reason, so 
we must do some guesswork to figure out where\n // the PMT is.\n let isProbablyProgramMap = false;\n if (!hasProgramMap && section.pid !== 0) {\n const isPesPacket = section.payload[0] === 0x00 && section.payload[1] === 0x00 && section.payload[2] === 0x01;\n if (!isPesPacket) {\n // Assume it's a PSI\n const bitstream = new Bitstream(section.payload);\n const pointerField = bitstream.readAlignedByte();\n bitstream.skipBits(8 * pointerField);\n const tableId = bitstream.readBits(8);\n isProbablyProgramMap = tableId === 0x02; // 0x02 == TS_program_map_section\n }\n }\n if (section.pid === 0 && !hasProgramAssociationTable) {\n const bitstream = new Bitstream(section.payload);\n const pointerField = bitstream.readAlignedByte();\n bitstream.skipBits(8 * pointerField);\n bitstream.skipBits(14);\n const sectionLength = bitstream.readBits(10);\n bitstream.skipBits(40);\n while (8 * (sectionLength + BYTES_BEFORE_SECTION_LENGTH) - bitstream.pos > BITS_IN_CRC_32) {\n const programNumber = bitstream.readBits(16);\n bitstream.skipBits(3); // Reserved\n if (programNumber !== 0) {\n if (programMapPid !== null) {\n throw new Error('Only files with a single program are supported.');\n }\n else {\n programMapPid = bitstream.readBits(13);\n }\n }\n }\n if (programMapPid === null) {\n throw new Error('Program Association Table must link to a Program Map Table.');\n }\n hasProgramAssociationTable = true;\n }\n else if ((section.pid === programMapPid || isProbablyProgramMap) && !hasProgramMap) {\n const bitstream = new Bitstream(section.payload);\n const pointerField = bitstream.readAlignedByte();\n bitstream.skipBits(8 * pointerField);\n bitstream.skipBits(12);\n const sectionLength = bitstream.readBits(12);\n bitstream.skipBits(43);\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const pcrPid = bitstream.readBits(13);\n bitstream.skipBits(6);\n // \"The remaining 10 bits specify the number of bytes of the descriptors immediately following the\n // program_info_length 
field\"\n const programInfoLength = bitstream.readBits(10);\n bitstream.skipBits(8 * programInfoLength);\n while (8 * (sectionLength + BYTES_BEFORE_SECTION_LENGTH) - bitstream.pos > BITS_IN_CRC_32) {\n const streamType = bitstream.readBits(8);\n bitstream.skipBits(3);\n const elementaryPid = bitstream.readBits(13);\n bitstream.skipBits(6);\n const esInfoLength = bitstream.readBits(10);\n // Check ES descriptors to detect AC-3/E-AC-3 in System B\n const esInfoEndPos = bitstream.pos + 8 * esInfoLength;\n let hasAc3Descriptor = false;\n let hasEac3Descriptor = false;\n while (bitstream.pos < esInfoEndPos) {\n const descriptorTag = bitstream.readBits(8);\n const descriptorLength = bitstream.readBits(8);\n if (descriptorTag === 0x6a) {\n hasAc3Descriptor = true;\n }\n else if (descriptorTag === 0x7a || descriptorTag === 0xcc) {\n hasEac3Descriptor = true;\n }\n bitstream.skipBits(8 * descriptorLength);\n }\n let info = null;\n switch (streamType) {\n case 3 /* MpegTsStreamType.MP3_MPEG1 */:\n case 4 /* MpegTsStreamType.MP3_MPEG2 */:\n case 15 /* MpegTsStreamType.AAC */:\n {\n const codec = streamType === 15 /* MpegTsStreamType.AAC */ ? 'aac' : 'mp3';\n info = {\n type: 'audio',\n codec,\n aacCodecInfo: null,\n numberOfChannels: -1,\n sampleRate: -1,\n };\n }\n ;\n break;\n case 27 /* MpegTsStreamType.AVC */:\n case 36 /* MpegTsStreamType.HEVC */:\n {\n const codec = streamType === 27 /* MpegTsStreamType.AVC */ ? 
'avc' : 'hevc';\n info = {\n type: 'video',\n codec: codec,\n avcCodecInfo: null,\n hevcCodecInfo: null,\n colorSpace: {\n primaries: null,\n transfer: null,\n matrix: null,\n fullRange: null,\n },\n width: -1,\n height: -1,\n squarePixelWidth: -1,\n squarePixelHeight: -1,\n reorderSize: -1,\n };\n }\n ;\n break;\n case 129 /* MpegTsStreamType.AC3_SYSTEM_A */:\n {\n info = {\n type: 'audio',\n codec: 'ac3',\n aacCodecInfo: null,\n numberOfChannels: -1,\n sampleRate: -1,\n };\n }\n ;\n break;\n case 135 /* MpegTsStreamType.EAC3_SYSTEM_A */:\n {\n info = {\n type: 'audio',\n codec: 'eac3',\n aacCodecInfo: null,\n numberOfChannels: -1,\n sampleRate: -1,\n };\n }\n ;\n break;\n case 6 /* MpegTsStreamType.PRIVATE_DATA */:\n {\n if (hasEac3Descriptor) {\n info = {\n type: 'audio',\n codec: 'eac3',\n aacCodecInfo: null,\n numberOfChannels: -1,\n sampleRate: -1,\n };\n }\n else if (hasAc3Descriptor) {\n info = {\n type: 'audio',\n codec: 'ac3',\n aacCodecInfo: null,\n numberOfChannels: -1,\n sampleRate: -1,\n };\n }\n }\n ;\n break;\n default: {\n // If we don't recognize the codec, we don't surface the track at all. 
This is because\n // we can't determine its metadata and also have no idea how to packetize its data.\n console.warn(`Unsupported stream_type 0x${streamType.toString(16)}; ignoring stream.`);\n }\n }\n if (info) {\n this.elementaryStreams.push({\n demuxer: this,\n pid: elementaryPid,\n streamType,\n initialized: false,\n firstSection: null,\n info,\n referencePesPackets: [],\n });\n }\n }\n hasProgramMap = true;\n }\n else {\n const elementaryStream = this.elementaryStreams.find(x => x.pid === section.pid);\n if (elementaryStream && !elementaryStream.initialized) {\n const pesPacket = readPesPacket(section);\n if (!pesPacket) {\n throw new Error(`Couldn't read first PES packet for Elementary Stream with PID ${elementaryStream.pid}`);\n }\n elementaryStream.firstSection = section;\n if (elementaryStream.info.type === 'video') {\n if (elementaryStream.info.codec === 'avc') {\n elementaryStream.info.avcCodecInfo\n = extractAvcDecoderConfigurationRecord(pesPacket.data);\n if (!elementaryStream.info.avcCodecInfo) {\n throw new Error('Invalid AVC video stream; could not extract AVCDecoderConfigurationRecord'\n + ' from first packet.');\n }\n const spsUnit = elementaryStream.info.avcCodecInfo.sequenceParameterSets[0];\n assert(spsUnit);\n const spsInfo = parseAvcSps(spsUnit);\n elementaryStream.info.width = spsInfo.displayWidth;\n elementaryStream.info.height = spsInfo.displayHeight;\n if (spsInfo.pixelAspectRatio.num > spsInfo.pixelAspectRatio.den) {\n elementaryStream.info.squarePixelWidth = Math.round(elementaryStream.info.width\n * spsInfo.pixelAspectRatio.num / spsInfo.pixelAspectRatio.den);\n elementaryStream.info.squarePixelHeight = elementaryStream.info.height;\n }\n else {\n elementaryStream.info.squarePixelWidth = elementaryStream.info.width;\n elementaryStream.info.squarePixelHeight = Math.round(elementaryStream.info.height\n * spsInfo.pixelAspectRatio.den / spsInfo.pixelAspectRatio.num);\n }\n elementaryStream.info.colorSpace = {\n primaries: 
COLOR_PRIMARIES_MAP_INVERSE[spsInfo.colourPrimaries],\n transfer: TRANSFER_CHARACTERISTICS_MAP_INVERSE[spsInfo.transferCharacteristics],\n matrix: MATRIX_COEFFICIENTS_MAP_INVERSE[spsInfo.matrixCoefficients],\n fullRange: !!spsInfo.fullRangeFlag,\n };\n elementaryStream.info.reorderSize = spsInfo.maxDecFrameBuffering;\n elementaryStream.initialized = true;\n }\n else if (elementaryStream.info.codec === 'hevc') {\n elementaryStream.info.hevcCodecInfo\n = extractHevcDecoderConfigurationRecord(pesPacket.data);\n if (!elementaryStream.info.hevcCodecInfo) {\n throw new Error('Invalid HEVC video stream; could not extract HVCDecoderConfigurationRecord'\n + ' from first packet.');\n }\n const spsArray = elementaryStream.info.hevcCodecInfo.arrays.find(a => a.nalUnitType === HevcNalUnitType.SPS_NUT);\n const spsUnit = spsArray.nalUnits[0];\n assert(spsUnit);\n const spsInfo = parseHevcSps(spsUnit);\n elementaryStream.info.width = spsInfo.displayWidth;\n elementaryStream.info.height = spsInfo.displayHeight;\n if (spsInfo.pixelAspectRatio.num > spsInfo.pixelAspectRatio.den) {\n elementaryStream.info.squarePixelWidth = Math.round(elementaryStream.info.width\n * spsInfo.pixelAspectRatio.num / spsInfo.pixelAspectRatio.den);\n elementaryStream.info.squarePixelHeight = elementaryStream.info.height;\n }\n else {\n elementaryStream.info.squarePixelWidth = elementaryStream.info.width;\n elementaryStream.info.squarePixelHeight = Math.round(elementaryStream.info.height\n * spsInfo.pixelAspectRatio.den / spsInfo.pixelAspectRatio.num);\n }\n elementaryStream.info.colorSpace = {\n primaries: COLOR_PRIMARIES_MAP_INVERSE[spsInfo.colourPrimaries],\n transfer: TRANSFER_CHARACTERISTICS_MAP_INVERSE[spsInfo.transferCharacteristics],\n matrix: MATRIX_COEFFICIENTS_MAP_INVERSE[spsInfo.matrixCoefficients],\n fullRange: !!spsInfo.fullRangeFlag,\n };\n elementaryStream.info.reorderSize = spsInfo.maxDecFrameBuffering;\n elementaryStream.initialized = true;\n }\n else {\n throw new Error('Unhandled.');\n 
}\n }\n else {\n if (elementaryStream.info.codec === 'aac') {\n const slice = FileSlice.tempFromBytes(pesPacket.data);\n const header = readAdtsFrameHeader(slice);\n if (!header) {\n throw new Error('Invalid AAC audio stream; could not read ADTS frame header from first packet.');\n }\n elementaryStream.info.aacCodecInfo = {\n isMpeg2: false,\n objectType: header.objectType,\n };\n elementaryStream.info.numberOfChannels\n = aacChannelMap[header.channelConfiguration];\n elementaryStream.info.sampleRate\n = aacFrequencyTable[header.samplingFrequencyIndex];\n elementaryStream.initialized = true;\n }\n else if (elementaryStream.info.codec === 'mp3') {\n const word = readU32Be(FileSlice.tempFromBytes(pesPacket.data));\n const result = readMp3FrameHeader(word, pesPacket.data.byteLength);\n if (!result.header) {\n throw new Error('Invalid MP3 audio stream; could not read frame header from first packet.');\n }\n elementaryStream.info.numberOfChannels = result.header.channel === 3 ? 1 : 2;\n elementaryStream.info.sampleRate = result.header.sampleRate;\n elementaryStream.initialized = true;\n }\n else if (elementaryStream.info.codec === 'ac3') {\n const frameInfo = parseAc3SyncFrame(pesPacket.data);\n if (!frameInfo) {\n throw new Error('Invalid AC-3 audio stream; could not read sync frame from first packet.');\n }\n if (frameInfo.fscod === 3) {\n throw new Error('Invalid AC-3 audio stream; reserved sample rate code found in first packet.');\n }\n elementaryStream.info.numberOfChannels\n = AC3_ACMOD_CHANNEL_COUNTS[frameInfo.acmod] + frameInfo.lfeon;\n elementaryStream.info.sampleRate = AC3_SAMPLE_RATES[frameInfo.fscod];\n elementaryStream.initialized = true;\n }\n else if (elementaryStream.info.codec === 'eac3') {\n const frameInfo = parseEac3SyncFrame(pesPacket.data);\n if (!frameInfo) {\n throw new Error('Invalid E-AC-3 audio stream; could not read sync frame from first packet.');\n }\n const sampleRate = getEac3SampleRate(frameInfo);\n if (sampleRate === null) {\n throw 
new Error('Invalid E-AC-3 audio stream; reserved sample rate code found in first packet.');\n }\n elementaryStream.info.numberOfChannels = getEac3ChannelCount(frameInfo);\n elementaryStream.info.sampleRate = sampleRate;\n elementaryStream.initialized = true;\n }\n else {\n throw new Error('Unhandled.');\n }\n }\n }\n }\n const isDone = hasProgramMap && this.elementaryStreams.every(x => x.initialized);\n if (isDone) {\n break;\n }\n currentPos += this.packetStride;\n }\n if (!hasProgramMap) {\n if (!hasProgramAssociationTable) {\n throw new Error('No Program Association Table found in the file.');\n }\n throw new Error('No Program Map Table found in the file.');\n }\n for (const stream of this.elementaryStreams) {\n if (stream.info.type === 'video') {\n this.tracks.push(new InputVideoTrack(this.input, new MpegTsVideoTrackBacking(stream)));\n }\n else {\n this.tracks.push(new InputAudioTrack(this.input, new MpegTsAudioTrackBacking(stream)));\n }\n }\n })();\n }\n async getTracks() {\n await this.readMetadata();\n return this.tracks;\n }\n async getMetadataTags() {\n return {}; // Nothing for now\n }\n async computeDuration() {\n const tracks = await this.getTracks();\n const trackDurations = await Promise.all(tracks.map(x => x.computeDuration()));\n return Math.max(0, ...trackDurations);\n }\n async getMimeType() {\n await this.readMetadata();\n const tracks = await this.getTracks();\n const codecStrings = await Promise.all(tracks.map(x => x.getCodecParameterString()));\n return buildMpegTsMimeType(codecStrings);\n }\n async readSection(startPos, full, contiguous = false) {\n let endPos = startPos;\n let currentPos = startPos;\n const chunks = [];\n let chunksByteLength = 0;\n let firstPacket = null;\n let mustAddSectionEnd = true;\n let randomAccessIndicator = 0;\n while (true) {\n const packet = await this.readPacket(currentPos);\n currentPos += this.packetStride;\n if (!packet) {\n break;\n }\n if (!firstPacket) {\n if (packet.payloadUnitStartIndicator === 0) {\n 
break;\n }\n firstPacket = packet;\n }\n else {\n if (packet.pid !== firstPacket.pid) {\n if (contiguous) {\n break; // End of section\n }\n else {\n continue; // Ignore this packet\n }\n }\n if (packet.payloadUnitStartIndicator === 1) {\n break;\n }\n }\n const hasAdaptationField = !!(packet.adaptationFieldControl & 0b10);\n const hasPayload = !!(packet.adaptationFieldControl & 0b01);\n let adaptationFieldLength = 0;\n if (hasAdaptationField) {\n adaptationFieldLength = 1 + packet.body[0];\n // Extract random_access_indicator from first packet's adaptation field\n if (packet === firstPacket && adaptationFieldLength > 1) {\n randomAccessIndicator = (packet.body[1] >> 6) & 1;\n }\n }\n if (hasPayload) {\n if (adaptationFieldLength === 0) {\n chunks.push(packet.body);\n chunksByteLength += packet.body.byteLength;\n }\n else {\n chunks.push(packet.body.subarray(adaptationFieldLength));\n chunksByteLength += packet.body.byteLength - adaptationFieldLength;\n }\n }\n endPos = currentPos;\n // 64 is just \"a bit of data\", enough for the PES packet header\n if (!full && chunksByteLength >= 64) {\n mustAddSectionEnd = false; // Not the actual section end\n break;\n }\n // Check if we already know this is a section end\n const isKnownSectionEnd = binarySearchExact(this.sectionEndPositions, endPos, x => x) !== -1;\n if (isKnownSectionEnd) {\n mustAddSectionEnd = false;\n break;\n }\n }\n if (mustAddSectionEnd) {\n const index = binarySearchLessOrEqual(this.sectionEndPositions, endPos, x => x);\n this.sectionEndPositions.splice(index + 1, 0, endPos);\n }\n if (!firstPacket) {\n return null;\n }\n let merged;\n if (chunks.length === 1) {\n merged = chunks[0];\n }\n else {\n const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);\n merged = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n merged.set(chunk, offset);\n offset += chunk.length;\n }\n }\n return {\n startPos,\n endPos: full ? 
endPos : null,\n pid: firstPacket.pid,\n payload: merged,\n randomAccessIndicator,\n };\n }\n async readPacketHeader(pos) {\n let slice = this.reader.requestSlice(pos, 4);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null;\n }\n const syncByte = readU8(slice);\n if (syncByte !== 0x47) {\n throw new Error('Invalid TS packet sync byte. Likely an internal bug, please report this file.');\n }\n const nextTwoBytes = readU16Be(slice);\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportErrorIndicator = nextTwoBytes >> 15;\n const payloadUnitStartIndicator = (nextTwoBytes >> 14) & 0x1;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportPriority = (nextTwoBytes >> 13) & 0x1;\n const pid = nextTwoBytes & 0x1FFF;\n const nextByte = readU8(slice);\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportScramblingControl = nextByte >> 6;\n const adaptationFieldControl = (nextByte >> 4) & 0x3;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const continuityCounter = nextByte & 0xF;\n return {\n payloadUnitStartIndicator,\n pid,\n adaptationFieldControl,\n };\n }\n async readPacket(pos) {\n // Code in here is duplicated from readPacketHeader for performance reasons\n let slice = this.reader.requestSlice(pos, TS_PACKET_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null;\n }\n const bytes = readBytes(slice, TS_PACKET_SIZE);\n const syncByte = bytes[0];\n if (syncByte !== 0x47) {\n throw new Error('Invalid TS packet sync byte. 
Likely an internal bug, please report this file.');\n }\n const nextTwoBytes = (bytes[1] << 8) + bytes[2];\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportErrorIndicator = nextTwoBytes >> 15;\n const payloadUnitStartIndicator = (nextTwoBytes >> 14) & 0x1;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportPriority = (nextTwoBytes >> 13) & 0x1;\n const pid = nextTwoBytes & 0x1FFF;\n const nextByte = bytes[3];\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const transportScramblingControl = nextByte >> 6;\n const adaptationFieldControl = (nextByte >> 4) & 0x3;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const continuityCounter = nextByte & 0xF;\n return {\n payloadUnitStartIndicator,\n pid,\n adaptationFieldControl,\n body: bytes.subarray(4),\n };\n }\n}\nconst readPesPacketHeader = (section) => {\n if (section.payload.byteLength < 3) {\n return null;\n }\n const bitstream = new Bitstream(section.payload);\n const startCodePrefix = bitstream.readBits(24);\n if (startCodePrefix !== 0x000001) {\n return null;\n }\n const streamId = bitstream.readBits(8);\n bitstream.skipBits(16);\n if (streamId === 0b10111100 // program_stream_map\n || streamId === 0b10111110 // padding_stream\n || streamId === 0b10111111 // private_stream_2\n || streamId === 0b11110000 // ECM\n || streamId === 0b11110001 // EMM\n || streamId === 0b11111111 // program_stream_directory\n || streamId === 0b11110010 // DSMCC_stream\n || streamId === 0b11111000 // ITU-T Rec. 
H.222.1 type E stream\n ) {\n return null;\n }\n bitstream.skipBits(8);\n const ptsDtsFlags = bitstream.readBits(2);\n bitstream.skipBits(14);\n let pts = 0;\n if (ptsDtsFlags === 0b10 || ptsDtsFlags === 0b11) {\n bitstream.skipBits(4);\n pts += bitstream.readBits(3) * (1 << 30);\n bitstream.skipBits(1);\n pts += bitstream.readBits(15) * (1 << 15);\n bitstream.skipBits(1);\n pts += bitstream.readBits(15);\n }\n else {\n throw new Error('PES packets without PTS are not currently supported. If you think this file should be supported,'\n + ' please report it.');\n }\n return {\n sectionStartPos: section.startPos,\n sectionEndPos: section.endPos,\n pts,\n randomAccessIndicator: section.randomAccessIndicator,\n };\n};\nconst readPesPacket = (section) => {\n assert(section.endPos !== null); // Can only read full PES packets from fully read sections\n const header = readPesPacketHeader(section);\n if (!header) {\n return null;\n }\n const bitstream = new Bitstream(section.payload);\n bitstream.skipBits(32);\n const pesPacketLength = bitstream.readBits(16);\n const BYTES_UNTIL_END_OF_PES_PACKET_LENGTH = 6;\n bitstream.skipBits(16);\n const pesHeaderDataLength = bitstream.readBits(8);\n const pesHeaderEndPos = bitstream.pos + 8 * pesHeaderDataLength;\n bitstream.pos = pesHeaderEndPos;\n const bytePos = pesHeaderEndPos / 8;\n assert(Number.isInteger(bytePos));\n const data = section.payload.subarray(bytePos, \n // \"A value of 0 indicates that the PES packet length is neither specified nor bounded and is allowed only in\n // PES packets whose payload consists of bytes from a video elementary stream contained in\n // transport stream packets.\"\n pesPacketLength > 0\n ? 
BYTES_UNTIL_END_OF_PES_PACKET_LENGTH + pesPacketLength\n : section.payload.byteLength);\n return {\n ...header,\n data,\n };\n};\nexport class MpegTsTrackBacking {\n constructor(elementaryStream) {\n this.elementaryStream = elementaryStream;\n this.packetBuffers = new WeakMap();\n /** Used for recreating PacketBuffers if necessary. */\n this.packetSectionStarts = new WeakMap();\n }\n getId() {\n return this.elementaryStream.pid;\n }\n getNumber() {\n const demuxer = this.elementaryStream.demuxer;\n const trackType = this.elementaryStream.info.type;\n let number = 0;\n for (const track of demuxer.tracks) {\n if (track.type === trackType) {\n number++;\n }\n assert(track._backing instanceof MpegTsTrackBacking);\n if (track._backing.elementaryStream === this.elementaryStream) {\n break;\n }\n }\n return number;\n }\n getCodec() {\n throw new Error('Not implemented on base class.');\n }\n getInternalCodecId() {\n return this.elementaryStream.streamType;\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getDisposition() {\n return DEFAULT_TRACK_DISPOSITION;\n }\n getTimeResolution() {\n return TIMESCALE;\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 0);\n }\n async getFirstTimestamp() {\n const firstPacket = await this.getFirstPacket({ metadataOnly: true });\n return firstPacket?.timestamp ?? 0;\n }\n createEncodedPacket(suppliedPacket, duration, options) {\n let packetType;\n if (this.allPacketsAreKeyPackets()) {\n packetType = 'key';\n }\n else {\n packetType = suppliedPacket.randomAccessIndicator === 1\n ? 'key'\n : 'delta';\n }\n return new EncodedPacket(options.metadataOnly ? 
PLACEHOLDER_DATA : suppliedPacket.data, packetType, suppliedPacket.pts / TIMESCALE, Math.max(duration / TIMESCALE, 0), suppliedPacket.sequenceNumber, suppliedPacket.data.byteLength);\n }\n async getFirstPacket(options) {\n const section = this.elementaryStream.firstSection;\n assert(section);\n const pesPacket = readPesPacket(section);\n assert(pesPacket);\n const context = new PacketReadingContext(this.elementaryStream, pesPacket);\n const buffer = new PacketBuffer(this, context);\n const result = await buffer.readNext();\n if (!result) {\n return null;\n }\n // result.packet.randomAccessIndicator = 1; // Assume the first packet is always a key packet\n const packet = this.createEncodedPacket(result.packet, result.duration, options);\n this.packetBuffers.set(packet, buffer);\n this.packetSectionStarts.set(packet, result.packet.sectionStartPos);\n return packet;\n }\n async getNextPacket(packet, options) {\n let buffer = this.packetBuffers.get(packet);\n if (buffer) {\n // Fast path\n const result = await buffer.readNext();\n if (!result) {\n return null;\n }\n // Remove PacketBuffer access from the old packet, it belongs to the next packet now\n this.packetBuffers.delete(packet);\n const newPacket = this.createEncodedPacket(result.packet, result.duration, options);\n this.packetBuffers.set(newPacket, buffer);\n this.packetSectionStarts.set(newPacket, result.packet.sectionStartPos);\n return newPacket;\n }\n // No buffer, we gotta do some rereading\n const sectionStartPos = this.packetSectionStarts.get(packet);\n if (sectionStartPos === undefined) {\n throw new Error('Packet was not created from this track.');\n }\n const demuxer = this.elementaryStream.demuxer;\n const section = await demuxer.readSection(sectionStartPos, true);\n assert(section);\n const pesPacket = readPesPacket(section);\n assert(pesPacket);\n const context = new PacketReadingContext(this.elementaryStream, pesPacket);\n buffer = new PacketBuffer(this, context);\n // Advance until we pass the 
current packet's sequence number\n const targetSequenceNumber = packet.sequenceNumber;\n while (true) {\n const result = await buffer.readNext();\n if (!result) {\n return null;\n }\n if (result.packet.sequenceNumber > targetSequenceNumber) {\n // We found the next packet!\n const newPacket = this.createEncodedPacket(result.packet, result.duration, options);\n this.packetBuffers.set(newPacket, buffer);\n this.packetSectionStarts.set(newPacket, result.packet.sectionStartPos);\n return newPacket;\n }\n }\n }\n async getNextKeyPacket(packet, options) {\n let currentPacket = packet;\n // Just loop until we hit one\n while (true) {\n currentPacket = await this.getNextPacket(currentPacket, options);\n if (!currentPacket) {\n return null;\n }\n if (currentPacket.type === 'key') {\n return currentPacket;\n }\n }\n }\n getPacket(timestamp, options) {\n return this.doPacketLookup(timestamp, false, options);\n }\n getKeyPacket(timestamp, options) {\n return this.doPacketLookup(timestamp, true, options);\n }\n /**\n * Searches for the packet with the largest timestamp not larger than `timestamp` in the file, using a combination\n * of chunk-based binary search and linear refinement. 
The reason the coarse search is done in large chunks is to\n * make it more performant for small files and over high-latency readers such as the network.\n */\n async doPacketLookup(timestamp, keyframesOnly, options) {\n const searchPts = roundIfAlmostInteger(timestamp * TIMESCALE);\n const demuxer = this.elementaryStream.demuxer;\n const { reader, seekChunkSize } = demuxer;\n const pid = this.elementaryStream.pid;\n const findFirstPesPacketHeaderInChunk = async (startPos, endPos) => {\n let currentPos = startPos;\n while (currentPos < endPos) {\n const packetHeader = await demuxer.readPacketHeader(currentPos);\n if (!packetHeader) {\n return null;\n }\n if (packetHeader.pid === pid && packetHeader.payloadUnitStartIndicator === 1) {\n const section = await demuxer.readSection(currentPos, false);\n if (!section) {\n return null;\n }\n const pesPacketHeader = readPesPacketHeader(section);\n if (pesPacketHeader) {\n return pesPacketHeader;\n }\n }\n currentPos += demuxer.packetStride;\n }\n return null;\n };\n // Get the first PES packet of the track (always treated as a key frame candidate)\n const firstSection = this.elementaryStream.firstSection;\n assert(firstSection);\n const firstPesPacketHeader = readPesPacketHeader(firstSection);\n assert(firstPesPacketHeader);\n if (searchPts < firstPesPacketHeader.pts) {\n // We're before the first packet, definitely nothing here\n return null;\n }\n let scanStartPos;\n const referencePesPackets = this.elementaryStream.referencePesPackets;\n const referencePointIndex = binarySearchLessOrEqual(referencePesPackets, searchPts, x => x.pts);\n const referencePoint = referencePointIndex !== -1 ? 
referencePesPackets[referencePointIndex] : null;\n if (referencePoint && searchPts - referencePoint.pts < TIMESCALE / 2) {\n // Reference point ain't too far away, prefer it over the chunk search\n scanStartPos = referencePoint.sectionStartPos;\n }\n else {\n let startChunkIndex = 0;\n if (reader.fileSize !== null) {\n const numChunks = Math.ceil(reader.fileSize / seekChunkSize);\n if (numChunks > 1) {\n // Binary search to find the chunk with highest index whose first PES has pts <= searchPts\n let low = 0;\n let high = numChunks - 1;\n startChunkIndex = low;\n while (low <= high) {\n const mid = Math.floor((low + high) / 2);\n const chunkStartPos = floorToMultiple(mid * seekChunkSize, demuxer.packetStride)\n + firstPesPacketHeader.sectionStartPos;\n const chunkEndPos = chunkStartPos + seekChunkSize;\n const pesHeader = await findFirstPesPacketHeaderInChunk(chunkStartPos, chunkEndPos);\n if (!pesHeader) {\n // No PES packet found in this chunk, search left\n high = mid - 1;\n continue;\n }\n if (pesHeader.pts <= searchPts) {\n // This chunk's first PES is <= searchPts, it's a candidate\n startChunkIndex = mid;\n low = mid + 1; // Search right\n }\n else {\n // Search left\n high = mid - 1;\n }\n }\n }\n }\n scanStartPos = floorToMultiple(startChunkIndex * seekChunkSize, demuxer.packetStride) + firstPesPacketHeader.sectionStartPos;\n }\n // Find the first PES packet at or after scanStartPos\n let currentPesHeader = await findFirstPesPacketHeaderInChunk(scanStartPos, reader.fileSize ?? 
Infinity);\n if (!currentPesHeader) {\n // Fallback to first packet\n currentPesHeader = firstPesPacketHeader;\n }\n const reorderSize = this.getReorderSize();\n const retrieveEncodedPacket = async (sectionStartPos, predicate) => {\n // Load the relevant section in full\n const section = await demuxer.readSection(sectionStartPos, true);\n assert(section);\n const pesPacket = readPesPacket(section);\n assert(pesPacket);\n const context = new PacketReadingContext(this.elementaryStream, pesPacket);\n const buffer = new PacketBuffer(this, context);\n // Advance until the top-most presentation timestamp crosses or equals searchPts\n while (true) {\n const topPts = last(buffer.presentationOrderPackets)?.pts ?? -Infinity;\n if (topPts >= searchPts) {\n break;\n }\n const didRead = await buffer.readNextPacket();\n if (!didRead) {\n break;\n }\n }\n const targetIndex = findLastIndex(buffer.presentationOrderPackets, predicate);\n if (targetIndex === -1) {\n return null;\n }\n const targetPacket = buffer.presentationOrderPackets[targetIndex];\n const lastDuration = targetIndex === 0\n ? 0\n : targetPacket.pts - buffer.presentationOrderPackets[targetIndex - 1].pts;\n // Pop packets in decode order until we hit the target packet\n while (buffer.decodeOrderPackets[0] !== targetPacket) {\n buffer.decodeOrderPackets.shift();\n }\n buffer.lastDuration = lastDuration; // Kinda ugly but necessary fix\n const result = await buffer.readNext();\n assert(result);\n const packet = this.createEncodedPacket(result.packet, result.duration, options);\n this.packetBuffers.set(packet, buffer);\n this.packetSectionStarts.set(packet, result.packet.sectionStartPos);\n return packet;\n };\n if (!keyframesOnly || this.allPacketsAreKeyPackets()) {\n // Normat packet lookup case. Slightly easier since we just need to search (mostly) forward to find the\n // packet.\n // Linear scan to find the PES packet with largest pts <= searchPts. 
This will be used as the \"midpoint\"\n // of the next refinement step (which is needed because of B-frames).\n outer: while (true) {\n let currentPos = currentPesHeader.sectionStartPos + demuxer.packetStride;\n while (true) {\n const packetHeader = await demuxer.readPacketHeader(currentPos);\n if (!packetHeader) {\n break outer; // End of file\n }\n if (packetHeader.pid === pid && packetHeader.payloadUnitStartIndicator === 1) {\n const section = await demuxer.readSection(currentPos, false);\n if (section) {\n const nextPesHeader = readPesPacketHeader(section);\n if (nextPesHeader) {\n if (nextPesHeader.pts > searchPts) {\n break outer;\n }\n currentPesHeader = nextPesHeader;\n maybeInsertReferencePacket(this.elementaryStream, nextPesHeader);\n break;\n }\n }\n }\n currentPos += demuxer.packetStride;\n }\n }\n // Rewind by reorderSize PES packets (even for audio! To ensure proper durations)\n outer: for (let i = 0; i < reorderSize; i++) {\n let pos = currentPesHeader.sectionStartPos - demuxer.packetStride;\n while (pos >= demuxer.packetOffset) {\n const packetHeader = await demuxer.readPacketHeader(pos);\n if (!packetHeader) {\n break outer;\n }\n if (packetHeader.pid === pid && packetHeader.payloadUnitStartIndicator === 1) {\n const section = await demuxer.readSection(pos, false);\n if (section) {\n const header = readPesPacketHeader(section);\n if (header) {\n currentPesHeader = header;\n break;\n }\n }\n }\n pos -= demuxer.packetStride;\n }\n }\n return retrieveEncodedPacket(currentPesHeader.sectionStartPos, p => p.pts <= searchPts);\n }\n else {\n // Key packet lookup case. 
Slightly harder since the starting chunk may not have a key packet at all, which\n // means we might need to search the previous chunks until we find something.\n let currentChunkStartPos = scanStartPos;\n let nextChunkStartPos = null; // \"next\" as in later in the file, even tho we scan backwards\n while (true) {\n let bestKeyPesHeader = null;\n const isFirstChunk = currentChunkStartPos <= firstPesPacketHeader.sectionStartPos;\n let pesHeader;\n if (isFirstChunk) {\n pesHeader = firstPesPacketHeader;\n // Since we force the first packet to be seen as a key frame:\n bestKeyPesHeader = firstPesPacketHeader;\n }\n else {\n pesHeader = await findFirstPesPacketHeaderInChunk(currentChunkStartPos, reader.fileSize ?? Infinity);\n }\n let passedSearchPts = false;\n let lookaheadCount = 0;\n outer: while (pesHeader) {\n if (nextChunkStartPos !== null && pesHeader.sectionStartPos >= nextChunkStartPos) {\n // Stop at the next chunk boundary\n break;\n }\n const isKeyCandidate = pesHeader.randomAccessIndicator === 1;\n if (isKeyCandidate && pesHeader.pts <= searchPts) {\n bestKeyPesHeader = pesHeader;\n }\n if (pesHeader.pts > searchPts) {\n passedSearchPts = true;\n }\n // If we've passed searchPts, do lookahead for reorderSize-1 more packets just to be sure\n if (passedSearchPts) {\n lookaheadCount++;\n if (lookaheadCount >= reorderSize) {\n break;\n }\n }\n // Find next PES packet\n let currentPos = pesHeader.sectionStartPos + demuxer.packetStride;\n while (true) {\n const packetHeader = await demuxer.readPacketHeader(currentPos);\n if (!packetHeader) {\n break outer; // End of file\n }\n if (packetHeader.pid === pid && packetHeader.payloadUnitStartIndicator === 1) {\n const section = await demuxer.readSection(currentPos, false);\n if (section) {\n pesHeader = readPesPacketHeader(section);\n if (pesHeader) {\n maybeInsertReferencePacket(this.elementaryStream, pesHeader);\n break;\n }\n }\n }\n currentPos += demuxer.packetStride;\n }\n }\n if (bestKeyPesHeader) {\n let 
startPesHeader = bestKeyPesHeader;\n if (lookaheadCount === 0) {\n // Packet is at the end of stream, let's rewind a little to obtain the correct packet duration\n outer: for (let i = 0; i < reorderSize - 1; i++) {\n let pos = startPesHeader.sectionStartPos - demuxer.packetStride;\n while (pos >= demuxer.packetOffset) {\n const packetHeader = await demuxer.readPacketHeader(pos);\n if (!packetHeader) {\n break outer;\n }\n if (packetHeader.pid === pid && packetHeader.payloadUnitStartIndicator === 1) {\n const section = await demuxer.readSection(pos, false);\n if (section) {\n const header = readPesPacketHeader(section);\n if (header) {\n startPesHeader = header;\n break;\n }\n }\n }\n pos -= demuxer.packetStride;\n }\n }\n }\n const encodedPacket = await retrieveEncodedPacket(startPesHeader.sectionStartPos, p => p.pts <= searchPts && p.randomAccessIndicator === 1);\n assert(encodedPacket); // There must be one\n return encodedPacket;\n }\n assert(!isFirstChunk); // Impossible not to find a key frame in the first chunk\n // No key frame found in this chunk, move one chunk to the left\n nextChunkStartPos = currentChunkStartPos;\n currentChunkStartPos = Math.max(floorToMultiple(currentChunkStartPos - firstPesPacketHeader.sectionStartPos - seekChunkSize, demuxer.packetStride) + firstPesPacketHeader.sectionStartPos, firstPesPacketHeader.sectionStartPos);\n }\n }\n }\n}\nclass MpegTsVideoTrackBacking extends MpegTsTrackBacking {\n constructor(elementaryStream) {\n super(elementaryStream);\n this.elementaryStream = elementaryStream;\n this.decoderConfig = {\n codec: extractVideoCodecString({\n width: this.elementaryStream.info.width,\n height: this.elementaryStream.info.height,\n codec: this.elementaryStream.info.codec,\n codecDescription: null,\n colorSpace: this.elementaryStream.info.colorSpace,\n avcType: 1,\n avcCodecInfo: this.elementaryStream.info.avcCodecInfo,\n hevcCodecInfo: this.elementaryStream.info.hevcCodecInfo,\n vp9CodecInfo: null,\n av1CodecInfo: null,\n 
}),\n codedWidth: this.elementaryStream.info.width,\n codedHeight: this.elementaryStream.info.height,\n displayAspectWidth: this.elementaryStream.info.squarePixelWidth,\n displayAspectHeight: this.elementaryStream.info.squarePixelHeight,\n colorSpace: this.elementaryStream.info.colorSpace,\n };\n }\n getCodec() {\n return this.elementaryStream.info.codec;\n }\n getCodedWidth() {\n return this.elementaryStream.info.width;\n }\n getCodedHeight() {\n return this.elementaryStream.info.height;\n }\n getSquarePixelWidth() {\n return this.elementaryStream.info.squarePixelWidth;\n }\n getSquarePixelHeight() {\n return this.elementaryStream.info.squarePixelHeight;\n }\n getRotation() {\n return 0;\n }\n async getColorSpace() {\n return this.elementaryStream.info.colorSpace;\n }\n async canBeTransparent() {\n return false;\n }\n async getDecoderConfig() {\n return this.decoderConfig;\n }\n allPacketsAreKeyPackets() {\n return false;\n }\n getReorderSize() {\n return this.elementaryStream.info.reorderSize;\n }\n}\nclass MpegTsAudioTrackBacking extends MpegTsTrackBacking {\n constructor(elementaryStream) {\n super(elementaryStream);\n this.elementaryStream = elementaryStream;\n }\n getCodec() {\n return this.elementaryStream.info.codec;\n }\n getNumberOfChannels() {\n return this.elementaryStream.info.numberOfChannels;\n }\n getSampleRate() {\n return this.elementaryStream.info.sampleRate;\n }\n async getDecoderConfig() {\n return {\n codec: extractAudioCodecString({\n codec: this.elementaryStream.info.codec,\n codecDescription: null,\n aacCodecInfo: this.elementaryStream.info.aacCodecInfo,\n }),\n numberOfChannels: this.elementaryStream.info.numberOfChannels,\n sampleRate: this.elementaryStream.info.sampleRate,\n };\n }\n allPacketsAreKeyPackets() {\n return true;\n }\n getReorderSize() {\n return 1; // No reordering, since no B-frames because goated\n }\n}\nconst maybeInsertReferencePacket = (elementaryStream, pesPacketHeader) => {\n const referencePesPackets = 
elementaryStream.referencePesPackets;\n const index = binarySearchLessOrEqual(referencePesPackets, pesPacketHeader.sectionStartPos, x => x.sectionStartPos);\n if (index >= 0) {\n // Since pts and file position don't necessarily have a monotonic relationship (since pts can go crazy),\n // let's see if inserting at the given index would violate the pts order. If so, return.\n const entry = referencePesPackets[index];\n if (pesPacketHeader.pts <= entry.pts) {\n return false;\n }\n const minByteDistance = elementaryStream.demuxer.minReferencePointByteDistance;\n if (pesPacketHeader.sectionStartPos - entry.sectionStartPos < minByteDistance) {\n // Too close\n return false;\n }\n if (index < referencePesPackets.length - 1) {\n const nextEntry = referencePesPackets[index + 1];\n if (nextEntry.pts < pesPacketHeader.pts) {\n // Out of order\n return false;\n }\n if (nextEntry.sectionStartPos - pesPacketHeader.sectionStartPos < minByteDistance) {\n // Too close\n return false;\n }\n }\n }\n referencePesPackets.splice(index + 1, 0, pesPacketHeader);\n return true;\n};\nconst markNextPacket = async (context) => {\n assert(!context.suppliedPacket);\n const elementaryStream = context.elementaryStream;\n if (elementaryStream.info.type === 'video') {\n const codec = elementaryStream.info.codec;\n const CHUNK_SIZE = 1024;\n if (codec !== 'avc' && codec !== 'hevc') {\n throw new Error('Unhandled.');\n }\n let packetStartPos = null;\n while (true) {\n let remaining = context.ensureBuffered(CHUNK_SIZE);\n if (remaining instanceof Promise)\n remaining = await remaining;\n if (remaining === 0) {\n break;\n }\n const chunkStartPos = context.currentPos;\n const chunk = context.readBytes(remaining);\n const length = chunk.byteLength;\n let i = 0;\n while (i < length) {\n const zeroIndex = chunk.indexOf(0, i);\n if (zeroIndex === -1 || zeroIndex >= length) {\n break;\n }\n i = zeroIndex;\n // Check if we have enough bytes to identify a start code\n const posBeforeZero = chunkStartPos + i;\n 
// Need at least 4 more bytes after the 0x00 to check for start code + NAL type\n if (i + 4 >= length) {\n // Not enough data in current chunk, seek back and let the next iteration handle it\n context.seekTo(posBeforeZero);\n break;\n }\n const b1 = chunk[i + 1];\n const b2 = chunk[i + 2];\n const b3 = chunk[i + 3];\n let startCodeLength = 0;\n let nalUnitTypeByte = null;\n // Check for 4-byte start code (0x00000001)\n if (b1 === 0x00 && b2 === 0x00 && b3 === 0x01) {\n startCodeLength = 4;\n nalUnitTypeByte = chunk[i + 4];\n }\n else if (b1 === 0x00 && b2 === 0x01) {\n // 3-byte start code (0x000001)\n startCodeLength = 3;\n nalUnitTypeByte = b3;\n }\n if (startCodeLength === 0) {\n // Not a start code, continue\n i++;\n continue;\n }\n const startCodePos = posBeforeZero;\n if (packetStartPos === null) {\n // This is our first start code, mark packet start\n packetStartPos = startCodePos;\n i += startCodeLength;\n continue;\n }\n // We have a second start code. Check if it's an AUD.\n if (nalUnitTypeByte !== null) {\n const nalUnitType = codec === 'avc'\n ? extractNalUnitTypeForAvc(nalUnitTypeByte)\n : extractNalUnitTypeForHevc(nalUnitTypeByte);\n const isAud = codec === 'avc'\n ? 
nalUnitType === AvcNalUnitType.AUD\n : nalUnitType === HevcNalUnitType.AUD_NUT;\n if (isAud) {\n // End the packet at this start code (before the AUD)\n const packetLength = startCodePos - packetStartPos;\n context.seekTo(packetStartPos);\n return context.supplyPacket(packetLength, 0);\n }\n }\n // Not an AUD, continue searching\n i += startCodeLength;\n }\n if (remaining < CHUNK_SIZE) {\n // End of stream\n break;\n }\n }\n // End of stream - return remaining data if we have a packet start\n if (packetStartPos !== null) {\n const packetLength = context.endPos - packetStartPos;\n context.seekTo(packetStartPos);\n return context.supplyPacket(packetLength, 0);\n }\n }\n else {\n const codec = elementaryStream.info.codec;\n const CHUNK_SIZE = 128;\n while (true) {\n let remaining = context.ensureBuffered(CHUNK_SIZE);\n if (remaining instanceof Promise)\n remaining = await remaining;\n const startPos = context.currentPos;\n while (context.currentPos - startPos < remaining) {\n const byte = context.readU8();\n if (codec === 'aac') {\n if (byte !== 0xff) {\n continue;\n }\n context.skip(-1);\n const possibleHeaderStartPos = context.currentPos;\n let remaining = context.ensureBuffered(MAX_ADTS_FRAME_HEADER_SIZE);\n if (remaining instanceof Promise)\n remaining = await remaining;\n if (remaining < MAX_ADTS_FRAME_HEADER_SIZE) {\n return;\n }\n const headerBytes = context.readBytes(MAX_ADTS_FRAME_HEADER_SIZE);\n const header = readAdtsFrameHeader(FileSlice.tempFromBytes(headerBytes));\n if (header) {\n context.seekTo(possibleHeaderStartPos);\n let remaining = context.ensureBuffered(header.frameLength);\n if (remaining instanceof Promise)\n remaining = await remaining;\n return context.supplyPacket(remaining, Math.round(SAMPLES_PER_AAC_FRAME * TIMESCALE / elementaryStream.info.sampleRate));\n }\n else {\n context.seekTo(possibleHeaderStartPos + 1);\n }\n }\n else if (codec === 'mp3') {\n if (byte !== 0xff) {\n continue;\n }\n context.skip(-1);\n const possibleHeaderStartPos = 
context.currentPos;\n let remaining = context.ensureBuffered(MP3_FRAME_HEADER_SIZE);\n if (remaining instanceof Promise)\n remaining = await remaining;\n if (remaining < MP3_FRAME_HEADER_SIZE) {\n return;\n }\n const headerBytes = context.readBytes(MP3_FRAME_HEADER_SIZE);\n const word = toDataView(headerBytes).getUint32(0);\n const result = readMp3FrameHeader(word, null);\n if (result.header) {\n context.seekTo(possibleHeaderStartPos);\n let remaining = context.ensureBuffered(result.header.totalSize);\n if (remaining instanceof Promise)\n remaining = await remaining;\n const duration = result.header.audioSamplesInFrame * TIMESCALE\n / elementaryStream.info.sampleRate;\n return context.supplyPacket(remaining, Math.round(duration));\n }\n else {\n context.seekTo(possibleHeaderStartPos + 1);\n }\n }\n else if (codec === 'ac3') {\n if (byte !== 0x0b) {\n continue;\n }\n context.skip(-1);\n const possibleSyncPos = context.currentPos;\n // Need at least 5 bytes for sync word + CRC + fscod/frmsizecod\n let remaining = context.ensureBuffered(5);\n if (remaining instanceof Promise)\n remaining = await remaining;\n if (remaining < 5) {\n return;\n }\n const headerBytes = context.readBytes(5);\n // Verify sync word (0x0B77)\n if (headerBytes[0] !== 0x0b || headerBytes[1] !== 0x77) {\n context.seekTo(possibleSyncPos + 1);\n continue;\n }\n const fscod = headerBytes[4] >> 6;\n const frmsizecod = headerBytes[4] & 0x3f;\n if (fscod === 3 || frmsizecod > 37) {\n // Invalid\n context.seekTo(possibleSyncPos + 1);\n continue;\n }\n const frameSize = AC3_FRAME_SIZES[3 * frmsizecod + fscod];\n assert(frameSize !== undefined);\n context.seekTo(possibleSyncPos);\n remaining = context.ensureBuffered(frameSize);\n if (remaining instanceof Promise)\n remaining = await remaining;\n const duration = Math.round(AC3_SAMPLES_PER_FRAME * TIMESCALE / elementaryStream.info.sampleRate);\n return context.supplyPacket(remaining, duration);\n }\n else if (codec === 'eac3') {\n if (byte !== 0x0b) {\n 
continue;\n }\n context.skip(-1);\n const possibleSyncPos = context.currentPos;\n // Need at least 5 bytes for E-AC-3 header parsing (sync word + frmsiz + fscod/numblkscod)\n let remaining = context.ensureBuffered(5);\n if (remaining instanceof Promise)\n remaining = await remaining;\n if (remaining < 5) {\n return;\n }\n const headerBytes = context.readBytes(5);\n if (headerBytes[0] !== 0x0b || headerBytes[1] !== 0x77) {\n context.seekTo(possibleSyncPos + 1);\n continue;\n }\n const frmsiz = ((headerBytes[2] & 0x07) << 8) | headerBytes[3];\n const frameSize = (frmsiz + 1) * 2;\n const fscod = headerBytes[4] >> 6;\n const numblkscod = fscod === 3 ? 3 : (headerBytes[4] >> 4) & 0x03;\n const numblks = EAC3_NUMBLKS_TABLE[numblkscod];\n context.seekTo(possibleSyncPos);\n remaining = context.ensureBuffered(frameSize);\n if (remaining instanceof Promise)\n remaining = await remaining;\n // Duration = numblks * 256 samples per block\n const samplesPerFrame = numblks * 256;\n const duration = Math.round(samplesPerFrame * TIMESCALE / elementaryStream.info.sampleRate);\n return context.supplyPacket(remaining, duration);\n }\n else {\n throw new Error('Unhandled.');\n }\n }\n if (remaining < CHUNK_SIZE) {\n break;\n }\n }\n }\n};\n/** Stateful context used to extract exact encoded packets from the underlying data stream. 
*/\nclass PacketReadingContext {\n constructor(elementaryStream, startingPesPacket) {\n this.currentPos = 0; // Relative to the data in startingPesPacket\n this.pesPackets = [];\n this.currentPesPacketIndex = 0;\n this.currentPesPacketPos = 0;\n this.endPos = 0;\n this.nextPts = 0;\n this.suppliedPacket = null;\n this.elementaryStream = elementaryStream;\n this.pid = elementaryStream.pid;\n this.demuxer = elementaryStream.demuxer;\n this.startingPesPacket = startingPesPacket;\n }\n clone() {\n const clone = new PacketReadingContext(this.elementaryStream, this.startingPesPacket);\n clone.currentPos = this.currentPos;\n clone.pesPackets = [...this.pesPackets];\n clone.currentPesPacketIndex = this.currentPesPacketIndex;\n clone.currentPesPacketPos = this.currentPesPacketPos;\n clone.endPos = this.endPos;\n clone.nextPts = this.nextPts;\n return clone;\n }\n ensureBuffered(length) {\n const remaining = this.endPos - this.currentPos;\n if (remaining >= length) {\n return length;\n }\n return this.bufferData(length - remaining)\n .then(() => Math.min(this.endPos - this.currentPos, length));\n }\n getCurrentPesPacket() {\n const packet = this.pesPackets[this.currentPesPacketIndex];\n assert(packet);\n return packet;\n }\n async bufferData(length) {\n const targetEndPos = this.endPos + length;\n while (this.endPos < targetEndPos) {\n let pesPacket;\n if (this.pesPackets.length === 0) {\n pesPacket = this.startingPesPacket;\n }\n else {\n // Find the next PES packet\n let currentPos = last(this.pesPackets).sectionEndPos;\n assert(currentPos !== null);\n while (true) {\n const packetHeader = await this.demuxer.readPacketHeader(currentPos);\n if (!packetHeader) {\n return;\n }\n if (packetHeader.pid === this.pid) {\n const nextSection = await this.demuxer.readSection(currentPos, true);\n if (!nextSection) {\n return;\n }\n const nextPesPacket = readPesPacket(nextSection);\n if (nextPesPacket) {\n pesPacket = nextPesPacket;\n break;\n }\n }\n currentPos += 
this.demuxer.packetStride;\n }\n }\n this.pesPackets.push(pesPacket);\n this.endPos += pesPacket.data.byteLength;\n if (this.pesPackets.length === 1) {\n // It's the first PES packet, set the PTS\n this.nextPts = pesPacket.pts;\n }\n }\n }\n readBytes(length) {\n const currentPesPacket = this.getCurrentPesPacket();\n const relativeStartOffset = this.currentPos - this.currentPesPacketPos;\n const relativeEndOffset = relativeStartOffset + length;\n this.currentPos += length;\n if (relativeEndOffset <= currentPesPacket.data.byteLength) {\n // Request can be satisfied with one PES packet\n return currentPesPacket.data.subarray(relativeStartOffset, relativeEndOffset);\n }\n // Data spans multiple PES packets, we must do some merging\n const result = new Uint8Array(length);\n result.set(currentPesPacket.data.subarray(relativeStartOffset));\n let offset = currentPesPacket.data.byteLength - relativeStartOffset;\n while (true) {\n this.advanceCurrentPacket();\n const currentPesPacket = this.getCurrentPesPacket();\n const relativeEndOffset = length - offset;\n if (relativeEndOffset <= currentPesPacket.data.byteLength) {\n result.set(currentPesPacket.data.subarray(0, relativeEndOffset), offset);\n break;\n }\n result.set(currentPesPacket.data, offset);\n offset += currentPesPacket.data.byteLength;\n }\n return result;\n }\n readU8() {\n let currentPesPacket = this.getCurrentPesPacket();\n const relativeOffset = this.currentPos - this.currentPesPacketPos;\n this.currentPos++;\n if (relativeOffset < currentPesPacket.data.byteLength) {\n return currentPesPacket.data[relativeOffset];\n }\n this.advanceCurrentPacket();\n currentPesPacket = this.getCurrentPesPacket();\n return currentPesPacket.data[0];\n }\n seekTo(pos) {\n if (pos === this.currentPos) {\n return;\n }\n if (pos < this.currentPos) {\n while (pos < this.currentPesPacketPos) {\n // Move to the previous PES packet\n this.currentPesPacketIndex--;\n const currentPacket = this.getCurrentPesPacket();\n 
this.currentPesPacketPos -= currentPacket.data.byteLength;\n this.nextPts = currentPacket.pts;\n }\n }\n else {\n while (true) {\n // Move to the next PES packet\n const currentPesPacket = this.getCurrentPesPacket();\n const currentEndPos = this.currentPesPacketPos + currentPesPacket.data.byteLength;\n if (pos < currentEndPos) {\n break;\n }\n this.currentPesPacketPos += currentPesPacket.data.byteLength;\n this.currentPesPacketIndex++;\n this.nextPts = this.getCurrentPesPacket().pts;\n }\n }\n this.currentPos = pos;\n }\n skip(n) {\n this.seekTo(this.currentPos + n);\n }\n advanceCurrentPacket() {\n this.currentPesPacketPos += this.getCurrentPesPacket().data.byteLength;\n this.currentPesPacketIndex++;\n this.nextPts = this.getCurrentPesPacket().pts;\n }\n /** Supplies the context with a new encoded packet, beginning at the current position. */\n supplyPacket(packetLength, intrinsicDuration) {\n const currentPesPacket = this.getCurrentPesPacket();\n maybeInsertReferencePacket(this.elementaryStream, currentPesPacket);\n const pts = this.nextPts;\n this.nextPts += intrinsicDuration;\n const sectionStartPos = currentPesPacket.sectionStartPos;\n // The sequence number is the starting position of the section the PES packet is in, PLUS the offset within the\n // PES packet where the packet starts.\n const sequenceNumber = sectionStartPos + (this.currentPos - this.currentPesPacketPos);\n const data = this.readBytes(packetLength);\n let randomAccessIndicator = currentPesPacket.randomAccessIndicator;\n assert(this.elementaryStream.firstSection);\n if (currentPesPacket.sectionStartPos === this.elementaryStream.firstSection.startPos) {\n randomAccessIndicator = 1; // Force the first PES packet to behave like a key packet always\n }\n this.suppliedPacket = {\n pts,\n data,\n sequenceNumber,\n sectionStartPos,\n randomAccessIndicator,\n };\n this.pesPackets.splice(0, this.currentPesPacketIndex);\n this.currentPesPacketIndex = 0;\n }\n}\n/**\n * A buffer that simulates decoder 
frame reordering to compute packet durations. Packets arrive in decode order but\n * durations are based on presentation order.\n */\nclass PacketBuffer {\n constructor(backing, context) {\n this.decodeOrderPackets = [];\n this.reorderBuffer = [];\n this.presentationOrderPackets = [];\n this.reachedEnd = false;\n this.lastDuration = 0;\n this.backing = backing;\n this.context = context;\n this.reorderSize = backing.getReorderSize();\n assert(this.reorderSize >= 0);\n }\n async readNext() {\n if (this.decodeOrderPackets.length === 0) {\n // We need the next packet\n const didRead = await this.readNextPacket();\n if (!didRead) {\n return null;\n }\n }\n // Ensure we know the next packet in presentation order so we can compute the current packet's duration\n await this.ensureCurrentPacketHasNext();\n const packet = this.decodeOrderPackets[0];\n // Let's compute the duration\n const presentationIndex = this.presentationOrderPackets.indexOf(packet);\n assert(presentationIndex !== -1);\n let duration;\n if (presentationIndex === this.presentationOrderPackets.length - 1) {\n duration = this.lastDuration; // Reasonable heuristic\n }\n else {\n const nextPacket = this.presentationOrderPackets[presentationIndex + 1];\n duration = nextPacket.pts - packet.pts;\n this.lastDuration = duration;\n }\n this.decodeOrderPackets.shift();\n // Shrink the presentation array as much as possible\n while (this.presentationOrderPackets.length > 0) {\n const first = this.presentationOrderPackets[0];\n if (this.decodeOrderPackets.includes(first)) {\n break;\n }\n this.presentationOrderPackets.shift();\n }\n return { packet, duration };\n }\n async readNextPacket() {\n if (this.reachedEnd) {\n return false;\n }\n let suppliedPacket;\n if (this.context.suppliedPacket) {\n // Small optimization: there was already a supplied packet in the context, so let's first use that one\n suppliedPacket = this.context.suppliedPacket;\n }\n else {\n await markNextPacket(this.context);\n suppliedPacket = 
this.context.suppliedPacket;\n }\n this.context.suppliedPacket = null;\n if (!suppliedPacket) {\n this.reachedEnd = true;\n this.flushReorderBuffer();\n return false;\n }\n this.decodeOrderPackets.push(suppliedPacket);\n this.processPacketThroughReorderBuffer(suppliedPacket);\n return true;\n }\n async ensureCurrentPacketHasNext() {\n const current = this.decodeOrderPackets[0];\n assert(current);\n while (true) {\n const presentationIndex = this.presentationOrderPackets.indexOf(current);\n // Check if current packet has a next packet\n if (presentationIndex !== -1 && presentationIndex <= this.presentationOrderPackets.length - 2) {\n break;\n }\n const didRead = await this.readNextPacket();\n if (!didRead) {\n break;\n }\n }\n }\n processPacketThroughReorderBuffer(packet) {\n this.reorderBuffer.push(packet);\n // If buffer is full, output the packet with smallest PTS\n if (this.reorderBuffer.length >= this.reorderSize) {\n let minIndex = 0;\n for (let i = 1; i < this.reorderBuffer.length; i++) {\n if (this.reorderBuffer[i].pts < this.reorderBuffer[minIndex].pts) {\n minIndex = i;\n }\n }\n const packet = this.reorderBuffer.splice(minIndex, 1)[0];\n this.presentationOrderPackets.push(packet);\n }\n }\n flushReorderBuffer() {\n this.reorderBuffer.sort((a, b) => a.pts - b.pts);\n this.presentationOrderPackets.push(...this.reorderBuffer);\n this.reorderBuffer.length = 0;\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { IsobmffDemuxer } from './isobmff/isobmff-demuxer.js';\nimport { EBMLId, MAX_HEADER_SIZE, MIN_HEADER_SIZE, readAsciiString, readElementHeader, readElementSize, readUnsignedInt, readVarIntSize, } from './matroska/ebml.js';\nimport { MatroskaDemuxer } from './matroska/matroska-demuxer.js';\nimport { Mp3Demuxer } from './mp3/mp3-demuxer.js';\nimport { FRAME_HEADER_SIZE, getXingOffset, INFO, XING } from '../shared/mp3-misc.js';\nimport { ID3_V2_HEADER_SIZE, readId3V2Header } from './id3.js';\nimport { readNextMp3FrameHeader } from './mp3/mp3-reader.js';\nimport { OggDemuxer } from './ogg/ogg-demuxer.js';\nimport { WaveDemuxer } from './wave/wave-demuxer.js';\nimport { MAX_ADTS_FRAME_HEADER_SIZE, MIN_ADTS_FRAME_HEADER_SIZE, readAdtsFrameHeader } from './adts/adts-reader.js';\nimport { AdtsDemuxer } from './adts/adts-demuxer.js';\nimport { readAscii, readBytes, readU32Be } from './reader.js';\nimport { FlacDemuxer } from './flac/flac-demuxer.js';\nimport { MpegTsDemuxer } from './mpeg-ts/mpeg-ts-demuxer.js';\nimport { TS_PACKET_SIZE } from './mpeg-ts/mpeg-ts-misc.js';\n/**\n * Base class representing an input media file format.\n * @group Input formats\n * @public\n */\nexport class InputFormat {\n}\n/**\n * Format representing files compatible with the ISO base media file format (ISOBMFF), like MP4 or MOV files.\n * @group Input formats\n * @public\n */\nexport class IsobmffInputFormat extends InputFormat {\n /** @internal */\n async _getMajorBrand(input) {\n let slice = input._reader.requestSlice(0, 12);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return null;\n slice.skip(4);\n const fourCc = readAscii(slice, 4);\n if (fourCc !== 'ftyp') {\n return null;\n }\n return readAscii(slice, 4);\n }\n /** @internal */\n _createDemuxer(input) {\n return new IsobmffDemuxer(input);\n }\n}\n/**\n * MPEG-4 Part 14 (MP4) file 
format.\n *\n * Do not instantiate this class; use the {@link MP4} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class Mp4InputFormat extends IsobmffInputFormat {\n /** @internal */\n async _canReadInput(input) {\n const majorBrand = await this._getMajorBrand(input);\n return !!majorBrand && majorBrand !== 'qt ';\n }\n get name() {\n return 'MP4';\n }\n get mimeType() {\n return 'video/mp4';\n }\n}\n/**\n * QuickTime File Format (QTFF), often called MOV.\n *\n * Do not instantiate this class; use the {@link QTFF} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class QuickTimeInputFormat extends IsobmffInputFormat {\n /** @internal */\n async _canReadInput(input) {\n const majorBrand = await this._getMajorBrand(input);\n return majorBrand === 'qt ';\n }\n get name() {\n return 'QuickTime File Format';\n }\n get mimeType() {\n return 'video/quicktime';\n }\n}\n/**\n * Matroska file format.\n *\n * Do not instantiate this class; use the {@link MATROSKA} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class MatroskaInputFormat extends InputFormat {\n /** @internal */\n async isSupportedEBMLOfDocType(input, desiredDocType) {\n let headerSlice = input._reader.requestSlice(0, MAX_HEADER_SIZE);\n if (headerSlice instanceof Promise)\n headerSlice = await headerSlice;\n if (!headerSlice)\n return false;\n const varIntSize = readVarIntSize(headerSlice);\n if (varIntSize === null) {\n return false;\n }\n if (varIntSize < 1 || varIntSize > 8) {\n return false;\n }\n const id = readUnsignedInt(headerSlice, varIntSize);\n if (id !== EBMLId.EBML) {\n return false;\n }\n const dataSize = readElementSize(headerSlice);\n if (typeof dataSize !== 'number') {\n return false; // Miss me with that shit\n }\n let dataSlice = input._reader.requestSlice(headerSlice.filePos, dataSize);\n if (dataSlice instanceof Promise)\n dataSlice = await dataSlice;\n if (!dataSlice)\n return false;\n const startPos = 
headerSlice.filePos;\n while (dataSlice.filePos <= startPos + dataSize - MIN_HEADER_SIZE) {\n const header = readElementHeader(dataSlice);\n if (!header)\n break;\n const { id, size } = header;\n const dataStartPos = dataSlice.filePos;\n if (size === undefined)\n return false;\n switch (id) {\n case EBMLId.EBMLVersion:\n {\n const ebmlVersion = readUnsignedInt(dataSlice, size);\n if (ebmlVersion !== 1) {\n return false;\n }\n }\n ;\n break;\n case EBMLId.EBMLReadVersion:\n {\n const ebmlReadVersion = readUnsignedInt(dataSlice, size);\n if (ebmlReadVersion !== 1) {\n return false;\n }\n }\n ;\n break;\n case EBMLId.DocType:\n {\n const docType = readAsciiString(dataSlice, size);\n if (docType !== desiredDocType) {\n return false;\n }\n }\n ;\n break;\n case EBMLId.DocTypeVersion:\n {\n const docTypeVersion = readUnsignedInt(dataSlice, size);\n if (docTypeVersion > 4) { // Support up to Matroska v4\n return false;\n }\n }\n ;\n break;\n }\n dataSlice.filePos = dataStartPos + size;\n }\n return true;\n }\n /** @internal */\n _canReadInput(input) {\n return this.isSupportedEBMLOfDocType(input, 'matroska');\n }\n /** @internal */\n _createDemuxer(input) {\n return new MatroskaDemuxer(input);\n }\n get name() {\n return 'Matroska';\n }\n get mimeType() {\n return 'video/x-matroska';\n }\n}\n/**\n * WebM file format, based on Matroska.\n *\n * Do not instantiate this class; use the {@link WEBM} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class WebMInputFormat extends MatroskaInputFormat {\n /** @internal */\n _canReadInput(input) {\n return this.isSupportedEBMLOfDocType(input, 'webm');\n }\n get name() {\n return 'WebM';\n }\n get mimeType() {\n return 'video/webm';\n }\n}\n/**\n * MP3 file format.\n *\n * Do not instantiate this class; use the {@link MP3} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class Mp3InputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n let currentPos = 0;\n 
while (true) {\n let slice = input._reader.requestSlice(currentPos, ID3_V2_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const id3V2Header = readId3V2Header(slice);\n if (!id3V2Header) {\n break;\n }\n currentPos = slice.filePos + id3V2Header.size;\n }\n const firstResult = await readNextMp3FrameHeader(input._reader, currentPos, currentPos + 4096);\n if (!firstResult) {\n return false;\n }\n const firstHeader = firstResult.header;\n const xingOffset = getXingOffset(firstHeader.mpegVersionId, firstHeader.channel);\n let slice = input._reader.requestSlice(firstResult.startPos + xingOffset, 4);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n const word = readU32Be(slice);\n const isXing = word === XING || word === INFO;\n if (isXing) {\n // Gotta be MP3\n return true;\n }\n currentPos = firstResult.startPos + firstResult.header.totalSize;\n // Fine, we found one frame header, but we're still not entirely sure this is MP3. 
Let's check if we can find\n // another header right after it:\n const secondResult = await readNextMp3FrameHeader(input._reader, currentPos, currentPos + FRAME_HEADER_SIZE);\n if (!secondResult) {\n return false;\n }\n const secondHeader = secondResult.header;\n // In a well-formed MP3 file, we'd expect these two frames to share some similarities:\n if (firstHeader.channel !== secondHeader.channel || firstHeader.sampleRate !== secondHeader.sampleRate) {\n return false;\n }\n // We have found two matching consecutive MP3 frames, a strong indicator that this is an MP3 file\n return true;\n }\n /** @internal */\n _createDemuxer(input) {\n return new Mp3Demuxer(input);\n }\n get name() {\n return 'MP3';\n }\n get mimeType() {\n return 'audio/mpeg';\n }\n}\n/**\n * WAVE file format, based on RIFF.\n *\n * Do not instantiate this class; use the {@link WAVE} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class WaveInputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n let slice = input._reader.requestSlice(0, 12);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n const riffType = readAscii(slice, 4);\n if (riffType !== 'RIFF' && riffType !== 'RIFX' && riffType !== 'RF64') {\n return false;\n }\n slice.skip(4);\n const format = readAscii(slice, 4);\n return format === 'WAVE';\n }\n /** @internal */\n _createDemuxer(input) {\n return new WaveDemuxer(input);\n }\n get name() {\n return 'WAVE';\n }\n get mimeType() {\n return 'audio/wav';\n }\n}\n/**\n * Ogg file format.\n *\n * Do not instantiate this class; use the {@link OGG} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class OggInputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n let slice = input._reader.requestSlice(0, 4);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n return readAscii(slice, 4) === 'OggS';\n }\n /** @internal 
*/\n _createDemuxer(input) {\n return new OggDemuxer(input);\n }\n get name() {\n return 'Ogg';\n }\n get mimeType() {\n return 'application/ogg';\n }\n}\n/**\n * FLAC file format.\n *\n * Do not instantiate this class; use the {@link FLAC} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class FlacInputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n let slice = input._reader.requestSlice(0, 4);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n return readAscii(slice, 4) === 'fLaC';\n }\n get name() {\n return 'FLAC';\n }\n get mimeType() {\n return 'audio/flac';\n }\n /** @internal */\n _createDemuxer(input) {\n return new FlacDemuxer(input);\n }\n}\n/**\n * ADTS file format.\n *\n * Do not instantiate this class; use the {@link ADTS} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class AdtsInputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n let currentPos = 0;\n while (true) {\n let slice = input._reader.requestSlice(currentPos, ID3_V2_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const id3V2Header = readId3V2Header(slice);\n if (!id3V2Header) {\n break;\n }\n currentPos = slice.filePos + id3V2Header.size;\n }\n let slice = input._reader.requestSliceRange(currentPos, MIN_ADTS_FRAME_HEADER_SIZE, MAX_ADTS_FRAME_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n const firstHeader = readAdtsFrameHeader(slice);\n if (!firstHeader) {\n return false;\n }\n currentPos += firstHeader.frameLength;\n slice = input._reader.requestSliceRange(currentPos, MIN_ADTS_FRAME_HEADER_SIZE, MAX_ADTS_FRAME_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n const secondHeader = readAdtsFrameHeader(slice);\n if (!secondHeader) {\n return false;\n }\n return firstHeader.objectType === 
secondHeader.objectType\n && firstHeader.samplingFrequencyIndex === secondHeader.samplingFrequencyIndex\n && firstHeader.channelConfiguration === secondHeader.channelConfiguration;\n }\n /** @internal */\n _createDemuxer(input) {\n return new AdtsDemuxer(input);\n }\n get name() {\n return 'ADTS';\n }\n get mimeType() {\n return 'audio/aac';\n }\n}\n/**\n * MPEG Transport Stream (MPEG-TS) file format.\n *\n * Do not instantiate this class; use the {@link MPEG_TS} singleton instead.\n *\n * @group Input formats\n * @public\n */\nexport class MpegTsInputFormat extends InputFormat {\n /** @internal */\n async _canReadInput(input) {\n const lengthToCheck = TS_PACKET_SIZE + 16 + 1;\n let slice = input._reader.requestSlice(0, lengthToCheck);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return false;\n const bytes = readBytes(slice, lengthToCheck);\n if (bytes[0] === 0x47 && bytes[TS_PACKET_SIZE] === 0x47) {\n // Regular MPEG-TS\n return true;\n }\n else if (bytes[0] === 0x47 && bytes[TS_PACKET_SIZE + 16] === 0x47) {\n // MPEG-TS with Forward Error Correction\n return true;\n }\n else if (bytes[4] === 0x47 && bytes[4 + TS_PACKET_SIZE] === 0x47) {\n // MPEG-2-TS (DVHS)\n return true;\n }\n return false;\n }\n /** @internal */\n _createDemuxer(input) {\n return new MpegTsDemuxer(input);\n }\n get name() {\n return 'MPEG Transport Stream';\n }\n get mimeType() {\n return 'video/MP2T';\n }\n}\n/**\n * MP4 input format singleton.\n * @group Input formats\n * @public\n */\nexport const MP4 = /* #__PURE__ */ new Mp4InputFormat();\n/**\n * QuickTime File Format input format singleton.\n * @group Input formats\n * @public\n */\nexport const QTFF = /* #__PURE__ */ new QuickTimeInputFormat();\n/**\n * Matroska input format singleton.\n * @group Input formats\n * @public\n */\nexport const MATROSKA = /* #__PURE__ */ new MatroskaInputFormat();\n/**\n * WebM input format singleton.\n * @group Input formats\n * @public\n */\nexport const WEBM = /* #__PURE__ */ 
new WebMInputFormat();\n/**\n * MP3 input format singleton.\n * @group Input formats\n * @public\n */\nexport const MP3 = /* #__PURE__ */ new Mp3InputFormat();\n/**\n * WAVE input format singleton.\n * @group Input formats\n * @public\n */\nexport const WAVE = /* #__PURE__ */ new WaveInputFormat();\n/**\n * Ogg input format singleton.\n * @group Input formats\n * @public\n */\nexport const OGG = /* #__PURE__ */ new OggInputFormat();\n/**\n * ADTS input format singleton.\n * @group Input formats\n * @public\n */\nexport const ADTS = /* #__PURE__ */ new AdtsInputFormat();\n/**\n * FLAC input format singleton.\n * @group Input formats\n * @public\n */\nexport const FLAC = /* #__PURE__ */ new FlacInputFormat();\n/**\n * MPEG-TS input format singleton.\n * @group Input formats\n * @public\n */\nexport const MPEG_TS = /* #__PURE__ */ new MpegTsInputFormat();\n/**\n * List of all input format singletons. If you don't need to support all input formats, you should specify the\n * formats individually for better tree shaking.\n * @group Input formats\n * @public\n */\nexport const ALL_FORMATS = [MP4, QTFF, MATROSKA, WEBM, WAVE, OGG, FLAC, MP3, ADTS, MPEG_TS];\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { determineVideoPacketType } from './codec-data.js';\nimport { customAudioDecoders, customVideoDecoders } from './custom-coder.js';\nimport { EncodedPacketSink } from './media-sink.js';\nimport { assert, simplifyRational } from './misc.js';\nimport { EncodedPacket } from './packet.js';\n/**\n * Represents a media track in an input file.\n * @group Input files & tracks\n * @public\n */\nexport class InputTrack {\n /** @internal */\n constructor(input, backing) {\n this.input = input;\n this._backing = backing;\n }\n /** Returns true if and only if this track is a video track. */\n isVideoTrack() {\n return this instanceof InputVideoTrack;\n }\n /** Returns true if and only if this track is an audio track. */\n isAudioTrack() {\n return this instanceof InputAudioTrack;\n }\n /** The unique ID of this track in the input file. */\n get id() {\n return this._backing.getId();\n }\n /**\n * The 1-based index of this track among all tracks of the same type in the input file. For example, the first\n * video track has number 1, the second video track has number 2, and so on. The index refers to the order in\n * which the tracks are returned by {@link Input.getTracks}.\n */\n get number() {\n return this._backing.getNumber();\n }\n /**\n * The identifier of the codec used internally by the container. It is not homogenized by Mediabunny\n * and depends entirely on the container format.\n *\n * This field can be used to determine the codec of a track in case Mediabunny doesn't know that codec.\n *\n * - For ISOBMFF files, this field returns the name of the Sample Description Box (e.g. 
`'avc1'`).\n * - For Matroska files, this field returns the value of the `CodecID` element.\n * - For WAVE files, this field returns the value of the format tag in the `'fmt '` chunk.\n * - For ADTS files, this field contains the `MPEG-4 Audio Object Type`.\n * - For MPEG-TS files, this field contains the `streamType` value from the Program Map Table.\n * - In all other cases, this field is `null`.\n */\n get internalCodecId() {\n return this._backing.getInternalCodecId();\n }\n /**\n * The ISO 639-2/T language code for this track. If the language is unknown, this field is `'und'` (undetermined).\n */\n get languageCode() {\n return this._backing.getLanguageCode();\n }\n /** A user-defined name for this track. */\n get name() {\n return this._backing.getName();\n }\n /**\n * A positive number x such that all timestamps and durations of all packets of this track are\n * integer multiples of 1/x.\n */\n get timeResolution() {\n return this._backing.getTimeResolution();\n }\n /** The track's disposition, i.e. information about its intended usage. */\n get disposition() {\n return this._backing.getDisposition();\n }\n /**\n * Returns the start timestamp of the first packet of this track, in seconds. While often near zero, this value\n * may be positive or even negative. A negative starting timestamp means the track's timing has been offset. Samples\n * with a negative timestamp should not be presented.\n */\n getFirstTimestamp() {\n return this._backing.getFirstTimestamp();\n }\n /** Returns the end timestamp of the last packet of this track, in seconds. */\n computeDuration() {\n return this._backing.computeDuration();\n }\n /**\n * Computes aggregate packet statistics for this track, such as average packet rate or bitrate.\n *\n * @param targetPacketCount - This optional parameter sets a target for how many packets this method must have\n * looked at before it can return early; this means, you can use it to aggregate only a subset (prefix) of all\n * packets. 
This is very useful for getting a great estimate of video frame rate without having to scan through the\n * entire file.\n */\n async computePacketStats(targetPacketCount = Infinity) {\n const sink = new EncodedPacketSink(this);\n let startTimestamp = Infinity;\n let endTimestamp = -Infinity;\n let packetCount = 0;\n let totalPacketBytes = 0;\n for await (const packet of sink.packets(undefined, undefined, { metadataOnly: true })) {\n if (packetCount >= targetPacketCount\n // This additional condition is needed to produce correct results with out-of-presentation-order packets\n && packet.timestamp >= endTimestamp) {\n break;\n }\n startTimestamp = Math.min(startTimestamp, packet.timestamp);\n endTimestamp = Math.max(endTimestamp, packet.timestamp + packet.duration);\n packetCount++;\n totalPacketBytes += packet.byteLength;\n }\n return {\n packetCount,\n averagePacketRate: packetCount\n ? Number((packetCount / (endTimestamp - startTimestamp)).toPrecision(16))\n : 0,\n averageBitrate: packetCount\n ? Number((8 * totalPacketBytes / (endTimestamp - startTimestamp)).toPrecision(16))\n : 0,\n };\n }\n}\n/**\n * Represents a video track in an input file.\n * @group Input files & tracks\n * @public\n */\nexport class InputVideoTrack extends InputTrack {\n /** @internal */\n constructor(input, backing) {\n super(input, backing);\n this._backing = backing;\n this.pixelAspectRatio = simplifyRational({\n num: this._backing.getSquarePixelWidth() * this._backing.getCodedHeight(),\n den: this._backing.getSquarePixelHeight() * this._backing.getCodedWidth(),\n });\n }\n get type() {\n return 'video';\n }\n get codec() {\n return this._backing.getCodec();\n }\n /** The width in pixels of the track's coded samples, before any transformations or rotations. */\n get codedWidth() {\n return this._backing.getCodedWidth();\n }\n /** The height in pixels of the track's coded samples, before any transformations or rotations. 
*/\n get codedHeight() {\n return this._backing.getCodedHeight();\n }\n /** The angle in degrees by which the track's frames should be rotated (clockwise). */\n get rotation() {\n return this._backing.getRotation();\n }\n /** The width of the track's frames in square pixels, adjusted for pixel aspect ratio but before rotation. */\n get squarePixelWidth() {\n return this._backing.getSquarePixelWidth();\n }\n /** The height of the track's frames in square pixels, adjusted for pixel aspect ratio but before rotation. */\n get squarePixelHeight() {\n return this._backing.getSquarePixelHeight();\n }\n /** The display width of the track's frames in pixels, after aspect ratio adjustment and rotation. */\n get displayWidth() {\n const rotation = this._backing.getRotation();\n return rotation % 180 === 0 ? this.squarePixelWidth : this.squarePixelHeight;\n }\n /** The display height of the track's frames in pixels, after aspect ratio adjustment and rotation. */\n get displayHeight() {\n const rotation = this._backing.getRotation();\n return rotation % 180 === 0 ? this.squarePixelHeight : this.squarePixelWidth;\n }\n /** Returns the color space of the track's samples. */\n getColorSpace() {\n return this._backing.getColorSpace();\n }\n /** If this method returns true, the track's samples use a high dynamic range (HDR). */\n async hasHighDynamicRange() {\n const colorSpace = await this._backing.getColorSpace();\n return colorSpace.primaries === 'bt2020' || colorSpace.primaries === 'smpte432'\n || colorSpace.transfer === 'pg' || colorSpace.transfer === 'hlg'\n || colorSpace.matrix === 'bt2020-ncl';\n }\n /** Checks if this track may contain transparent samples with alpha data. */\n canBeTransparent() {\n return this._backing.canBeTransparent();\n }\n /**\n * Returns the [decoder configuration](https://www.w3.org/TR/webcodecs/#video-decoder-config) for decoding the\n * track's packets using a [`VideoDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/VideoDecoder). 
Returns\n * null if the track's codec is unknown.\n */\n getDecoderConfig() {\n return this._backing.getDecoderConfig();\n }\n async getCodecParameterString() {\n const decoderConfig = await this._backing.getDecoderConfig();\n return decoderConfig?.codec ?? null;\n }\n async canDecode() {\n try {\n const decoderConfig = await this._backing.getDecoderConfig();\n if (!decoderConfig) {\n return false;\n }\n const codec = this._backing.getCodec();\n assert(codec !== null);\n if (customVideoDecoders.some(x => x.supports(codec, decoderConfig))) {\n return true;\n }\n if (typeof VideoDecoder === 'undefined') {\n return false;\n }\n const support = await VideoDecoder.isConfigSupported(decoderConfig);\n return support.supported === true;\n }\n catch (error) {\n console.error('Error during decodability check:', error);\n return false;\n }\n }\n async determinePacketType(packet) {\n if (!(packet instanceof EncodedPacket)) {\n throw new TypeError('packet must be an EncodedPacket.');\n }\n if (packet.isMetadataOnly) {\n throw new TypeError('packet must not be metadata-only to determine its type.');\n }\n if (this.codec === null) {\n return null;\n }\n const decoderConfig = await this.getDecoderConfig();\n assert(decoderConfig);\n return determineVideoPacketType(this.codec, decoderConfig, packet.data);\n }\n}\n/**\n * Represents an audio track in an input file.\n * @group Input files & tracks\n * @public\n */\nexport class InputAudioTrack extends InputTrack {\n /** @internal */\n constructor(input, backing) {\n super(input, backing);\n this._backing = backing;\n }\n get type() {\n return 'audio';\n }\n get codec() {\n return this._backing.getCodec();\n }\n /** The number of audio channels in the track. */\n get numberOfChannels() {\n return this._backing.getNumberOfChannels();\n }\n /** The track's audio sample rate in hertz. 
*/\n get sampleRate() {\n return this._backing.getSampleRate();\n }\n /**\n * Returns the [decoder configuration](https://www.w3.org/TR/webcodecs/#audio-decoder-config) for decoding the\n * track's packets using an [`AudioDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/AudioDecoder). Returns\n * null if the track's codec is unknown.\n */\n getDecoderConfig() {\n return this._backing.getDecoderConfig();\n }\n async getCodecParameterString() {\n const decoderConfig = await this._backing.getDecoderConfig();\n return decoderConfig?.codec ?? null;\n }\n async canDecode() {\n try {\n const decoderConfig = await this._backing.getDecoderConfig();\n if (!decoderConfig) {\n return false;\n }\n const codec = this._backing.getCodec();\n assert(codec !== null);\n if (customAudioDecoders.some(x => x.supports(codec, decoderConfig))) {\n return true;\n }\n if (decoderConfig.codec.startsWith('pcm-')) {\n return true; // Since we decode it ourselves\n }\n else {\n if (typeof AudioDecoder === 'undefined') {\n return false;\n }\n const support = await AudioDecoder.isConfigSupported(decoderConfig);\n return support.supported === true;\n }\n }\n catch (error) {\n console.error('Error during decodability check:', error);\n return false;\n }\n }\n async determinePacketType(packet) {\n if (!(packet instanceof EncodedPacket)) {\n throw new TypeError('packet must be an EncodedPacket.');\n }\n if (this.codec === null) {\n return null;\n }\n return 'key'; // No audio codec with delta packets\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { InputFormat } from './input-format.js';\nimport { assert, polyfillSymbolDispose } from './misc.js';\nimport { Reader } from './reader.js';\nimport { Source } from './source.js';\npolyfillSymbolDispose();\n/**\n * Represents an input media file. This is the root object from which all media read operations start.\n * @group Input files & tracks\n * @public\n */\nexport class Input {\n /** True if the input has been disposed. */\n get disposed() {\n return this._disposed;\n }\n /**\n * Creates a new input file from the specified options. No reading operations will be performed until methods are\n * called on this instance.\n */\n constructor(options) {\n /** @internal */\n this._demuxerPromise = null;\n /** @internal */\n this._format = null;\n /** @internal */\n this._disposed = false;\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (!Array.isArray(options.formats) || options.formats.some(x => !(x instanceof InputFormat))) {\n throw new TypeError('options.formats must be an array of InputFormat.');\n }\n if (!(options.source instanceof Source)) {\n throw new TypeError('options.source must be a Source.');\n }\n if (options.source._disposed) {\n throw new Error('options.source must not be disposed.');\n }\n this._formats = options.formats;\n this._source = options.source;\n this._reader = new Reader(options.source);\n }\n /** @internal */\n _getDemuxer() {\n return this._demuxerPromise ??= (async () => {\n this._reader.fileSize = await this._source.getSizeOrNull();\n for (const format of this._formats) {\n const canRead = await format._canReadInput(this);\n if (canRead) {\n this._format = format;\n return format._createDemuxer(this);\n }\n }\n throw new Error('Input has an unsupported or unrecognizable format.');\n })();\n }\n /**\n * Returns the source from which this input file 
reads its data. This is the same source that was passed to the\n * constructor.\n */\n get source() {\n return this._source;\n }\n /**\n * Returns the format of the input file. You can compare this result directly to the {@link InputFormat} singletons\n * or use `instanceof` checks for subset-aware logic (for example, `format instanceof MatroskaInputFormat` is true\n * for both MKV and WebM).\n */\n async getFormat() {\n await this._getDemuxer();\n assert(this._format);\n return this._format;\n }\n /**\n * Computes the duration of the input file, in seconds. More precisely, returns the largest end timestamp among\n * all tracks.\n */\n async computeDuration() {\n const demuxer = await this._getDemuxer();\n return demuxer.computeDuration();\n }\n /**\n * Returns the timestamp at which the input file starts. More precisely, returns the smallest starting timestamp\n * among all tracks.\n */\n async getFirstTimestamp() {\n const tracks = await this.getTracks();\n if (tracks.length === 0) {\n return 0;\n }\n const firstTimestamps = await Promise.all(tracks.map(x => x.getFirstTimestamp()));\n return Math.min(...firstTimestamps);\n }\n /** Returns the list of all tracks of this input file. */\n async getTracks() {\n const demuxer = await this._getDemuxer();\n return demuxer.getTracks();\n }\n /** Returns the list of all video tracks of this input file. */\n async getVideoTracks() {\n const tracks = await this.getTracks();\n return tracks.filter(x => x.isVideoTrack());\n }\n /** Returns the list of all audio tracks of this input file. */\n async getAudioTracks() {\n const tracks = await this.getTracks();\n return tracks.filter(x => x.isAudioTrack());\n }\n /** Returns the primary video track of this input file, or null if there are no video tracks. */\n async getPrimaryVideoTrack() {\n const tracks = await this.getTracks();\n return tracks.find(x => x.isVideoTrack()) ?? 
null;\n }\n /** Returns the primary audio track of this input file, or null if there are no audio tracks. */\n async getPrimaryAudioTrack() {\n const tracks = await this.getTracks();\n return tracks.find(x => x.isAudioTrack()) ?? null;\n }\n /** Returns the full MIME type of this input file, including track codecs. */\n async getMimeType() {\n const demuxer = await this._getDemuxer();\n return demuxer.getMimeType();\n }\n /**\n * Returns descriptive metadata tags about the media file, such as title, author, date, cover art, or other\n * attached files.\n */\n async getMetadataTags() {\n const demuxer = await this._getDemuxer();\n return demuxer.getMetadataTags();\n }\n /**\n * Disposes this input and frees connected resources. When an input is disposed, ongoing read operations will be\n * canceled, all future read operations will fail, any open decoders will be closed, and all ongoing media sink\n * operations will be canceled. Disallowed and canceled operations will throw an {@link InputDisposedError}.\n *\n * You are expected not to use an input after disposing it. While some operations may still work, it is not\n * specified and may change in any future update.\n */\n dispose() {\n if (this._disposed) {\n return;\n }\n this._disposed = true;\n this._source._disposed = true;\n this._source._dispose();\n }\n /**\n * Calls `.dispose()` on the input, implementing the `Disposable` interface for use with\n * JavaScript Explicit Resource Management features.\n */\n [Symbol.dispose]() {\n this.dispose();\n }\n}\n/**\n * Thrown when an operation was prevented because the corresponding {@link Input} has been disposed.\n * @group Input files & tracks\n * @public\n */\nexport class InputDisposedError extends Error {\n /** Creates a new {@link InputDisposedError}. 
*/\n constructor(message = 'Input has been disposed.') {\n super(message);\n this.name = 'InputDisposedError';\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport const buildIsobmffMimeType = (info) => {\n const base = info.hasVideo\n ? 'video/'\n : info.hasAudio\n ? 'audio/'\n : 'application/';\n let string = base + (info.isQuickTime ? 'quicktime' : 'mp4');\n if (info.codecStrings.length > 0) {\n const uniqueCodecMimeTypes = [...new Set(info.codecStrings)];\n string += `; codecs=\"${uniqueCodecMimeTypes.join(', ')}\"`;\n }\n return string;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { RichImageData } from '../metadata.js';\nimport { textDecoder } from '../misc.js';\nimport { readAscii, readBytes, readI32Be, readU16Be, readU32Be, readU64Be, readU8 } from '../reader.js';\nexport const MIN_BOX_HEADER_SIZE = 8;\nexport const MAX_BOX_HEADER_SIZE = 16;\nexport const readBoxHeader = (slice) => {\n let totalSize = readU32Be(slice);\n const name = readAscii(slice, 4);\n let headerSize = 8;\n const hasLargeSize = totalSize === 1;\n if (hasLargeSize) {\n totalSize = readU64Be(slice);\n headerSize = 16;\n }\n const contentSize = totalSize - headerSize;\n if (contentSize < 0) {\n return null; // Hardly a box is it\n }\n return { name, totalSize, headerSize, contentSize };\n};\nexport const readFixed_16_16 = (slice) => {\n return readI32Be(slice) / 0x10000;\n};\nexport const readFixed_2_30 = (slice) => {\n return readI32Be(slice) / 0x40000000;\n};\nexport const readIsomVariableInteger = (slice) 
=> {\n let result = 0;\n for (let i = 0; i < 4; i++) {\n result <<= 7;\n const nextByte = readU8(slice);\n result |= nextByte & 0x7f;\n if ((nextByte & 0x80) === 0) {\n break;\n }\n }\n return result;\n};\nexport const readMetadataStringShort = (slice) => {\n let stringLength = readU16Be(slice);\n slice.skip(2); // Language\n stringLength = Math.min(stringLength, slice.remainingLength);\n return textDecoder.decode(readBytes(slice, stringLength));\n};\nexport const readDataBox = (slice) => {\n const header = readBoxHeader(slice);\n if (!header || header.name !== 'data') {\n return null;\n }\n if (slice.remainingLength < 8) {\n // Box is too small\n return null;\n }\n const typeIndicator = readU32Be(slice);\n slice.skip(4); // Locale indicator\n const data = readBytes(slice, header.contentSize - 8);\n switch (typeIndicator) {\n case 1: return textDecoder.decode(data); // UTF-8\n case 2: return new TextDecoder('utf-16be').decode(data); // UTF-16-BE\n case 13: return new RichImageData(data, 'image/jpeg');\n case 14: return new RichImageData(data, 'image/png');\n case 27: return new RichImageData(data, 'image/bmp');\n default: return data;\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { assert, assertNever, textDecoder, textEncoder } from '../misc.js';\nimport { readBytes, readF32Be, readF64Be, readU8 } from '../reader.js';\n/** Wrapper around a number to be able to differentiate it in the writer. */\nexport class EBMLFloat32 {\n constructor(value) {\n this.value = value;\n }\n}\n/** Wrapper around a number to be able to differentiate it in the writer. */\nexport class EBMLFloat64 {\n constructor(value) {\n this.value = value;\n }\n}\n/** Wrapper around a number to be able to differentiate it in the writer. 
*/\nexport class EBMLSignedInt {\n constructor(value) {\n this.value = value;\n }\n}\nexport class EBMLUnicodeString {\n constructor(value) {\n this.value = value;\n }\n}\n/** Defines some of the EBML IDs used by Matroska files. */\nexport var EBMLId;\n(function (EBMLId) {\n EBMLId[EBMLId[\"EBML\"] = 440786851] = \"EBML\";\n EBMLId[EBMLId[\"EBMLVersion\"] = 17030] = \"EBMLVersion\";\n EBMLId[EBMLId[\"EBMLReadVersion\"] = 17143] = \"EBMLReadVersion\";\n EBMLId[EBMLId[\"EBMLMaxIDLength\"] = 17138] = \"EBMLMaxIDLength\";\n EBMLId[EBMLId[\"EBMLMaxSizeLength\"] = 17139] = \"EBMLMaxSizeLength\";\n EBMLId[EBMLId[\"DocType\"] = 17026] = \"DocType\";\n EBMLId[EBMLId[\"DocTypeVersion\"] = 17031] = \"DocTypeVersion\";\n EBMLId[EBMLId[\"DocTypeReadVersion\"] = 17029] = \"DocTypeReadVersion\";\n EBMLId[EBMLId[\"Void\"] = 236] = \"Void\";\n EBMLId[EBMLId[\"Segment\"] = 408125543] = \"Segment\";\n EBMLId[EBMLId[\"SeekHead\"] = 290298740] = \"SeekHead\";\n EBMLId[EBMLId[\"Seek\"] = 19899] = \"Seek\";\n EBMLId[EBMLId[\"SeekID\"] = 21419] = \"SeekID\";\n EBMLId[EBMLId[\"SeekPosition\"] = 21420] = \"SeekPosition\";\n EBMLId[EBMLId[\"Duration\"] = 17545] = \"Duration\";\n EBMLId[EBMLId[\"Info\"] = 357149030] = \"Info\";\n EBMLId[EBMLId[\"TimestampScale\"] = 2807729] = \"TimestampScale\";\n EBMLId[EBMLId[\"MuxingApp\"] = 19840] = \"MuxingApp\";\n EBMLId[EBMLId[\"WritingApp\"] = 22337] = \"WritingApp\";\n EBMLId[EBMLId[\"Tracks\"] = 374648427] = \"Tracks\";\n EBMLId[EBMLId[\"TrackEntry\"] = 174] = \"TrackEntry\";\n EBMLId[EBMLId[\"TrackNumber\"] = 215] = \"TrackNumber\";\n EBMLId[EBMLId[\"TrackUID\"] = 29637] = \"TrackUID\";\n EBMLId[EBMLId[\"TrackType\"] = 131] = \"TrackType\";\n EBMLId[EBMLId[\"FlagEnabled\"] = 185] = \"FlagEnabled\";\n EBMLId[EBMLId[\"FlagDefault\"] = 136] = \"FlagDefault\";\n EBMLId[EBMLId[\"FlagForced\"] = 21930] = \"FlagForced\";\n EBMLId[EBMLId[\"FlagOriginal\"] = 21934] = \"FlagOriginal\";\n EBMLId[EBMLId[\"FlagHearingImpaired\"] = 21931] = 
\"FlagHearingImpaired\";\n EBMLId[EBMLId[\"FlagVisualImpaired\"] = 21932] = \"FlagVisualImpaired\";\n EBMLId[EBMLId[\"FlagCommentary\"] = 21935] = \"FlagCommentary\";\n EBMLId[EBMLId[\"FlagLacing\"] = 156] = \"FlagLacing\";\n EBMLId[EBMLId[\"Name\"] = 21358] = \"Name\";\n EBMLId[EBMLId[\"Language\"] = 2274716] = \"Language\";\n EBMLId[EBMLId[\"LanguageBCP47\"] = 2274717] = \"LanguageBCP47\";\n EBMLId[EBMLId[\"CodecID\"] = 134] = \"CodecID\";\n EBMLId[EBMLId[\"CodecPrivate\"] = 25506] = \"CodecPrivate\";\n EBMLId[EBMLId[\"CodecDelay\"] = 22186] = \"CodecDelay\";\n EBMLId[EBMLId[\"SeekPreRoll\"] = 22203] = \"SeekPreRoll\";\n EBMLId[EBMLId[\"DefaultDuration\"] = 2352003] = \"DefaultDuration\";\n EBMLId[EBMLId[\"Video\"] = 224] = \"Video\";\n EBMLId[EBMLId[\"PixelWidth\"] = 176] = \"PixelWidth\";\n EBMLId[EBMLId[\"PixelHeight\"] = 186] = \"PixelHeight\";\n EBMLId[EBMLId[\"DisplayWidth\"] = 21680] = \"DisplayWidth\";\n EBMLId[EBMLId[\"DisplayHeight\"] = 21690] = \"DisplayHeight\";\n EBMLId[EBMLId[\"DisplayUnit\"] = 21682] = \"DisplayUnit\";\n EBMLId[EBMLId[\"AlphaMode\"] = 21440] = \"AlphaMode\";\n EBMLId[EBMLId[\"Audio\"] = 225] = \"Audio\";\n EBMLId[EBMLId[\"SamplingFrequency\"] = 181] = \"SamplingFrequency\";\n EBMLId[EBMLId[\"Channels\"] = 159] = \"Channels\";\n EBMLId[EBMLId[\"BitDepth\"] = 25188] = \"BitDepth\";\n EBMLId[EBMLId[\"SimpleBlock\"] = 163] = \"SimpleBlock\";\n EBMLId[EBMLId[\"BlockGroup\"] = 160] = \"BlockGroup\";\n EBMLId[EBMLId[\"Block\"] = 161] = \"Block\";\n EBMLId[EBMLId[\"BlockAdditions\"] = 30113] = \"BlockAdditions\";\n EBMLId[EBMLId[\"BlockMore\"] = 166] = \"BlockMore\";\n EBMLId[EBMLId[\"BlockAdditional\"] = 165] = \"BlockAdditional\";\n EBMLId[EBMLId[\"BlockAddID\"] = 238] = \"BlockAddID\";\n EBMLId[EBMLId[\"BlockDuration\"] = 155] = \"BlockDuration\";\n EBMLId[EBMLId[\"ReferenceBlock\"] = 251] = \"ReferenceBlock\";\n EBMLId[EBMLId[\"Cluster\"] = 524531317] = \"Cluster\";\n EBMLId[EBMLId[\"Timestamp\"] = 231] = \"Timestamp\";\n 
EBMLId[EBMLId[\"Cues\"] = 475249515] = \"Cues\";\n EBMLId[EBMLId[\"CuePoint\"] = 187] = \"CuePoint\";\n EBMLId[EBMLId[\"CueTime\"] = 179] = \"CueTime\";\n EBMLId[EBMLId[\"CueTrackPositions\"] = 183] = \"CueTrackPositions\";\n EBMLId[EBMLId[\"CueTrack\"] = 247] = \"CueTrack\";\n EBMLId[EBMLId[\"CueClusterPosition\"] = 241] = \"CueClusterPosition\";\n EBMLId[EBMLId[\"Colour\"] = 21936] = \"Colour\";\n EBMLId[EBMLId[\"MatrixCoefficients\"] = 21937] = \"MatrixCoefficients\";\n EBMLId[EBMLId[\"TransferCharacteristics\"] = 21946] = \"TransferCharacteristics\";\n EBMLId[EBMLId[\"Primaries\"] = 21947] = \"Primaries\";\n EBMLId[EBMLId[\"Range\"] = 21945] = \"Range\";\n EBMLId[EBMLId[\"Projection\"] = 30320] = \"Projection\";\n EBMLId[EBMLId[\"ProjectionType\"] = 30321] = \"ProjectionType\";\n EBMLId[EBMLId[\"ProjectionPoseRoll\"] = 30325] = \"ProjectionPoseRoll\";\n EBMLId[EBMLId[\"Attachments\"] = 423732329] = \"Attachments\";\n EBMLId[EBMLId[\"AttachedFile\"] = 24999] = \"AttachedFile\";\n EBMLId[EBMLId[\"FileDescription\"] = 18046] = \"FileDescription\";\n EBMLId[EBMLId[\"FileName\"] = 18030] = \"FileName\";\n EBMLId[EBMLId[\"FileMediaType\"] = 18016] = \"FileMediaType\";\n EBMLId[EBMLId[\"FileData\"] = 18012] = \"FileData\";\n EBMLId[EBMLId[\"FileUID\"] = 18094] = \"FileUID\";\n EBMLId[EBMLId[\"Chapters\"] = 272869232] = \"Chapters\";\n EBMLId[EBMLId[\"Tags\"] = 307544935] = \"Tags\";\n EBMLId[EBMLId[\"Tag\"] = 29555] = \"Tag\";\n EBMLId[EBMLId[\"Targets\"] = 25536] = \"Targets\";\n EBMLId[EBMLId[\"TargetTypeValue\"] = 26826] = \"TargetTypeValue\";\n EBMLId[EBMLId[\"TargetType\"] = 25546] = \"TargetType\";\n EBMLId[EBMLId[\"TagTrackUID\"] = 25541] = \"TagTrackUID\";\n EBMLId[EBMLId[\"TagEditionUID\"] = 25545] = \"TagEditionUID\";\n EBMLId[EBMLId[\"TagChapterUID\"] = 25540] = \"TagChapterUID\";\n EBMLId[EBMLId[\"TagAttachmentUID\"] = 25542] = \"TagAttachmentUID\";\n EBMLId[EBMLId[\"SimpleTag\"] = 26568] = \"SimpleTag\";\n EBMLId[EBMLId[\"TagName\"] = 17827] = 
\"TagName\";\n EBMLId[EBMLId[\"TagLanguage\"] = 17530] = \"TagLanguage\";\n EBMLId[EBMLId[\"TagString\"] = 17543] = \"TagString\";\n EBMLId[EBMLId[\"TagBinary\"] = 17541] = \"TagBinary\";\n EBMLId[EBMLId[\"ContentEncodings\"] = 28032] = \"ContentEncodings\";\n EBMLId[EBMLId[\"ContentEncoding\"] = 25152] = \"ContentEncoding\";\n EBMLId[EBMLId[\"ContentEncodingOrder\"] = 20529] = \"ContentEncodingOrder\";\n EBMLId[EBMLId[\"ContentEncodingScope\"] = 20530] = \"ContentEncodingScope\";\n EBMLId[EBMLId[\"ContentCompression\"] = 20532] = \"ContentCompression\";\n EBMLId[EBMLId[\"ContentCompAlgo\"] = 16980] = \"ContentCompAlgo\";\n EBMLId[EBMLId[\"ContentCompSettings\"] = 16981] = \"ContentCompSettings\";\n EBMLId[EBMLId[\"ContentEncryption\"] = 20533] = \"ContentEncryption\";\n})(EBMLId || (EBMLId = {}));\nexport const LEVEL_0_EBML_IDS = [\n EBMLId.EBML,\n EBMLId.Segment,\n];\n// All the stuff that can appear in a segment, basically\nexport const LEVEL_1_EBML_IDS = [\n EBMLId.SeekHead,\n EBMLId.Info,\n EBMLId.Cluster,\n EBMLId.Tracks,\n EBMLId.Cues,\n EBMLId.Attachments,\n EBMLId.Chapters,\n EBMLId.Tags,\n];\nexport const LEVEL_0_AND_1_EBML_IDS = [\n ...LEVEL_0_EBML_IDS,\n ...LEVEL_1_EBML_IDS,\n];\nexport const measureUnsignedInt = (value) => {\n if (value < (1 << 8)) {\n return 1;\n }\n else if (value < (1 << 16)) {\n return 2;\n }\n else if (value < (1 << 24)) {\n return 3;\n }\n else if (value < 2 ** 32) {\n return 4;\n }\n else if (value < 2 ** 40) {\n return 5;\n }\n else {\n return 6;\n }\n};\nexport const measureUnsignedBigInt = (value) => {\n if (value < (1n << 8n)) {\n return 1;\n }\n else if (value < (1n << 16n)) {\n return 2;\n }\n else if (value < (1n << 24n)) {\n return 3;\n }\n else if (value < (1n << 32n)) {\n return 4;\n }\n else if (value < (1n << 40n)) {\n return 5;\n }\n else if (value < (1n << 48n)) {\n return 6;\n }\n else if (value < (1n << 56n)) {\n return 7;\n }\n else {\n return 8;\n }\n};\nexport const measureSignedInt = (value) => {\n if (value 
>= -(1 << 6) && value < (1 << 6)) {\n return 1;\n }\n else if (value >= -(1 << 13) && value < (1 << 13)) {\n return 2;\n }\n else if (value >= -(1 << 20) && value < (1 << 20)) {\n return 3;\n }\n else if (value >= -(1 << 27) && value < (1 << 27)) {\n return 4;\n }\n else if (value >= -(2 ** 34) && value < 2 ** 34) {\n return 5;\n }\n else {\n return 6;\n }\n};\nexport const measureVarInt = (value) => {\n if (value < (1 << 7) - 1) {\n /** Top bit is set, leaving 7 bits to hold the integer, but we can't store\n * 127 because \"all bits set to one\" is a reserved value. Same thing for the\n * other cases below:\n */\n return 1;\n }\n else if (value < (1 << 14) - 1) {\n return 2;\n }\n else if (value < (1 << 21) - 1) {\n return 3;\n }\n else if (value < (1 << 28) - 1) {\n return 4;\n }\n else if (value < 2 ** 35 - 1) {\n return 5;\n }\n else if (value < 2 ** 42 - 1) {\n return 6;\n }\n else {\n throw new Error('EBML varint size not supported ' + value);\n }\n};\nexport class EBMLWriter {\n constructor(writer) {\n this.writer = writer;\n this.helper = new Uint8Array(8);\n this.helperView = new DataView(this.helper.buffer);\n /**\n * Stores the position from the start of the file to where EBML elements have been written. This is used to\n * rewrite/edit elements that were already added before, and to measure sizes of things.\n */\n this.offsets = new WeakMap();\n /** Same as offsets, but stores position where the element's data starts (after ID and size fields). 
*/\n this.dataOffsets = new WeakMap();\n }\n writeByte(value) {\n this.helperView.setUint8(0, value);\n this.writer.write(this.helper.subarray(0, 1));\n }\n writeFloat32(value) {\n this.helperView.setFloat32(0, value, false);\n this.writer.write(this.helper.subarray(0, 4));\n }\n writeFloat64(value) {\n this.helperView.setFloat64(0, value, false);\n this.writer.write(this.helper);\n }\n writeUnsignedInt(value, width = measureUnsignedInt(value)) {\n let pos = 0;\n // Each case falls through:\n switch (width) {\n case 6:\n // Need to use division to access >32 bits of floating point var\n this.helperView.setUint8(pos++, (value / 2 ** 40) | 0);\n // eslint-disable-next-line no-fallthrough\n case 5:\n this.helperView.setUint8(pos++, (value / 2 ** 32) | 0);\n // eslint-disable-next-line no-fallthrough\n case 4:\n this.helperView.setUint8(pos++, value >> 24);\n // eslint-disable-next-line no-fallthrough\n case 3:\n this.helperView.setUint8(pos++, value >> 16);\n // eslint-disable-next-line no-fallthrough\n case 2:\n this.helperView.setUint8(pos++, value >> 8);\n // eslint-disable-next-line no-fallthrough\n case 1:\n this.helperView.setUint8(pos++, value);\n break;\n default:\n throw new Error('Bad unsigned int size ' + width);\n }\n this.writer.write(this.helper.subarray(0, pos));\n }\n writeUnsignedBigInt(value, width = measureUnsignedBigInt(value)) {\n let pos = 0;\n for (let i = width - 1; i >= 0; i--) {\n this.helperView.setUint8(pos++, Number((value >> BigInt(i * 8)) & 0xffn));\n }\n this.writer.write(this.helper.subarray(0, pos));\n }\n writeSignedInt(value, width = measureSignedInt(value)) {\n if (value < 0) {\n // Two's complement stuff\n value += 2 ** (width * 8);\n }\n this.writeUnsignedInt(value, width);\n }\n writeVarInt(value, width = measureVarInt(value)) {\n let pos = 0;\n switch (width) {\n case 1:\n this.helperView.setUint8(pos++, (1 << 7) | value);\n break;\n case 2:\n this.helperView.setUint8(pos++, (1 << 6) | (value >> 8));\n 
this.helperView.setUint8(pos++, value);\n break;\n case 3:\n this.helperView.setUint8(pos++, (1 << 5) | (value >> 16));\n this.helperView.setUint8(pos++, value >> 8);\n this.helperView.setUint8(pos++, value);\n break;\n case 4:\n this.helperView.setUint8(pos++, (1 << 4) | (value >> 24));\n this.helperView.setUint8(pos++, value >> 16);\n this.helperView.setUint8(pos++, value >> 8);\n this.helperView.setUint8(pos++, value);\n break;\n case 5:\n /**\n * JavaScript converts its doubles to 32-bit integers for bitwise\n * operations, so we need to do a division by 2^32 instead of a\n * right-shift of 32 to retain those top 3 bits\n */\n this.helperView.setUint8(pos++, (1 << 3) | ((value / 2 ** 32) & 0x7));\n this.helperView.setUint8(pos++, value >> 24);\n this.helperView.setUint8(pos++, value >> 16);\n this.helperView.setUint8(pos++, value >> 8);\n this.helperView.setUint8(pos++, value);\n break;\n case 6:\n this.helperView.setUint8(pos++, (1 << 2) | ((value / 2 ** 40) & 0x3));\n this.helperView.setUint8(pos++, (value / 2 ** 32) | 0);\n this.helperView.setUint8(pos++, value >> 24);\n this.helperView.setUint8(pos++, value >> 16);\n this.helperView.setUint8(pos++, value >> 8);\n this.helperView.setUint8(pos++, value);\n break;\n default:\n throw new Error('Bad EBML varint size ' + width);\n }\n this.writer.write(this.helper.subarray(0, pos));\n }\n writeAsciiString(str) {\n this.writer.write(new Uint8Array(str.split('').map(x => x.charCodeAt(0))));\n }\n writeEBML(data) {\n if (data === null)\n return;\n if (data instanceof Uint8Array) {\n this.writer.write(data);\n }\n else if (Array.isArray(data)) {\n for (const elem of data) {\n this.writeEBML(elem);\n }\n }\n else {\n this.offsets.set(data, this.writer.getPos());\n this.writeUnsignedInt(data.id); // ID field\n if (Array.isArray(data.data)) {\n const sizePos = this.writer.getPos();\n const sizeSize = data.size === -1 ? 1 : (data.size ?? 
4);\n if (data.size === -1) {\n // Write the reserved all-one-bits marker for unknown/unbounded size.\n this.writeByte(0xff);\n }\n else {\n this.writer.seek(this.writer.getPos() + sizeSize);\n }\n const startPos = this.writer.getPos();\n this.dataOffsets.set(data, startPos);\n this.writeEBML(data.data);\n if (data.size !== -1) {\n const size = this.writer.getPos() - startPos;\n const endPos = this.writer.getPos();\n this.writer.seek(sizePos);\n this.writeVarInt(size, sizeSize);\n this.writer.seek(endPos);\n }\n }\n else if (typeof data.data === 'number') {\n const size = data.size ?? measureUnsignedInt(data.data);\n this.writeVarInt(size);\n this.writeUnsignedInt(data.data, size);\n }\n else if (typeof data.data === 'bigint') {\n const size = data.size ?? measureUnsignedBigInt(data.data);\n this.writeVarInt(size);\n this.writeUnsignedBigInt(data.data, size);\n }\n else if (typeof data.data === 'string') {\n this.writeVarInt(data.data.length);\n this.writeAsciiString(data.data);\n }\n else if (data.data instanceof Uint8Array) {\n this.writeVarInt(data.data.byteLength, data.size);\n this.writer.write(data.data);\n }\n else if (data.data instanceof EBMLFloat32) {\n this.writeVarInt(4);\n this.writeFloat32(data.data.value);\n }\n else if (data.data instanceof EBMLFloat64) {\n this.writeVarInt(8);\n this.writeFloat64(data.data.value);\n }\n else if (data.data instanceof EBMLSignedInt) {\n const size = data.size ?? 
measureSignedInt(data.data.value);\n this.writeVarInt(size);\n this.writeSignedInt(data.data.value, size);\n }\n else if (data.data instanceof EBMLUnicodeString) {\n const bytes = textEncoder.encode(data.data.value);\n this.writeVarInt(bytes.length);\n this.writer.write(bytes);\n }\n else {\n assertNever(data.data);\n }\n }\n }\n}\nexport const MAX_VAR_INT_SIZE = 8;\nexport const MIN_HEADER_SIZE = 2; // 1-byte ID and 1-byte size\nexport const MAX_HEADER_SIZE = 2 * MAX_VAR_INT_SIZE; // 8-byte ID and 8-byte size\nexport const readVarIntSize = (slice) => {\n if (slice.remainingLength < 1) {\n return null;\n }\n const firstByte = readU8(slice);\n slice.skip(-1);\n if (firstByte === 0) {\n return null; // Invalid VINT\n }\n let width = 1;\n let mask = 0x80;\n while ((firstByte & mask) === 0) {\n width++;\n mask >>= 1;\n }\n // Check if we have enough bytes to read the full varint\n if (slice.remainingLength < width) {\n return null;\n }\n return width;\n};\nexport const readVarInt = (slice) => {\n if (slice.remainingLength < 1) {\n return null;\n }\n // Read the first byte to determine the width of the variable-length integer\n const firstByte = readU8(slice);\n if (firstByte === 0) {\n return null; // Invalid VINT\n }\n // Find the position of VINT_MARKER, which determines the width\n let width = 1;\n let mask = 1 << 7;\n while ((firstByte & mask) === 0) {\n width++;\n mask >>= 1;\n }\n if (slice.remainingLength < width - 1) {\n // Not enough bytes\n return null;\n }\n // First byte's value needs the marker bit cleared\n let value = firstByte & (mask - 1);\n // Read remaining bytes\n for (let i = 1; i < width; i++) {\n value *= 1 << 8;\n value += readU8(slice);\n }\n return value;\n};\nexport const readUnsignedInt = (slice, width) => {\n if (width < 1 || width > 8) {\n throw new Error('Bad unsigned int size ' + width);\n }\n let value = 0;\n // Read bytes from most significant to least significant\n for (let i = 0; i < width; i++) {\n value *= 1 << 8;\n value += 
readU8(slice);\n }\n return value;\n};\nexport const readUnsignedBigInt = (slice, width) => {\n if (width < 1) {\n throw new Error('Bad unsigned int size ' + width);\n }\n let value = 0n;\n for (let i = 0; i < width; i++) {\n value <<= 8n;\n value += BigInt(readU8(slice));\n }\n return value;\n};\nexport const readSignedInt = (slice, width) => {\n let value = readUnsignedInt(slice, width);\n // If the highest bit is set, convert from two's complement\n if (value & (1 << (width * 8 - 1))) {\n value -= 2 ** (width * 8);\n }\n return value;\n};\nexport const readElementId = (slice) => {\n const size = readVarIntSize(slice);\n if (size === null) {\n return null;\n }\n if (slice.remainingLength < size) {\n return null; // It don't fit\n }\n const id = readUnsignedInt(slice, size);\n return id;\n};\n/** Returns `undefined` to indicate the EBML undefined size. Returns `null` if the size couldn't be read. */\nexport const readElementSize = (slice) => {\n // Need at least 1 byte to read the size\n if (slice.remainingLength < 1) {\n return null;\n }\n const firstByte = readU8(slice);\n if (firstByte === 0xff) {\n return undefined;\n }\n slice.skip(-1);\n const size = readVarInt(slice);\n if (size === null) {\n return null;\n }\n // In some (livestreamed) files, this is the value of the size field. While this technically is just a very\n // large number, it is intended to behave like the reserved size 0xFF, meaning the size is undefined. We\n // catch the number here. 
Note that it cannot be perfectly represented as a double, but the comparison works\n // nonetheless.\n // eslint-disable-next-line no-loss-of-precision\n if (size === 0x00ffffffffffffff) {\n return undefined;\n }\n return size;\n};\nexport const readElementHeader = (slice) => {\n assert(slice.remainingLength >= MIN_HEADER_SIZE);\n const id = readElementId(slice);\n if (id === null) {\n return null;\n }\n const size = readElementSize(slice);\n if (size === null) {\n return null;\n }\n return { id, size };\n};\nexport const readAsciiString = (slice, length) => {\n const bytes = readBytes(slice, length);\n // Actual string length might be shorter due to null terminators\n let strLength = 0;\n while (strLength < length && bytes[strLength] !== 0) {\n strLength += 1;\n }\n return String.fromCharCode(...bytes.subarray(0, strLength));\n};\nexport const readUnicodeString = (slice, length) => {\n const bytes = readBytes(slice, length);\n // Actual string length might be shorter due to null terminators\n let strLength = 0;\n while (strLength < length && bytes[strLength] !== 0) {\n strLength += 1;\n }\n return textDecoder.decode(bytes.subarray(0, strLength));\n};\nexport const readFloat = (slice, width) => {\n if (width === 0) {\n return 0;\n }\n if (width !== 4 && width !== 8) {\n throw new Error('Bad float size ' + width);\n }\n return width === 4 ? readF32Be(slice) : readF64Be(slice);\n};\n/** Returns the byte offset in the file of the next element with a matching ID. 
*/\nexport const searchForNextElementId = async (reader, startPos, ids, until) => {\n const idsSet = new Set(ids);\n let currentPos = startPos;\n while (until === null || currentPos < until) {\n let slice = reader.requestSliceRange(currentPos, MIN_HEADER_SIZE, MAX_HEADER_SIZE);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const elementHeader = readElementHeader(slice);\n if (!elementHeader) {\n break;\n }\n if (idsSet.has(elementHeader.id)) {\n return { pos: currentPos, found: true };\n }\n assertDefinedSize(elementHeader.size);\n currentPos = slice.filePos + elementHeader.size;\n }\n return { pos: (until !== null && until > currentPos) ? until : currentPos, found: false };\n};\n/** Searches for the next occurrence of an element ID using a naive byte-wise search. */\nexport const resync = async (reader, startPos, ids, until) => {\n const CHUNK_SIZE = 2 ** 16; // So we don't need to grab thousands of slices\n const idsSet = new Set(ids);\n let currentPos = startPos;\n while (currentPos < until) {\n let slice = reader.requestSliceRange(currentPos, 0, Math.min(CHUNK_SIZE, until - currentPos));\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n if (slice.length < MAX_VAR_INT_SIZE)\n break;\n for (let i = 0; i < slice.length - MAX_VAR_INT_SIZE; i++) {\n slice.filePos = currentPos;\n const elementId = readElementId(slice);\n if (elementId !== null && idsSet.has(elementId)) {\n return currentPos;\n }\n currentPos++;\n }\n }\n return null;\n};\nexport const CODEC_STRING_MAP = {\n 'avc': 'V_MPEG4/ISO/AVC',\n 'hevc': 'V_MPEGH/ISO/HEVC',\n 'vp8': 'V_VP8',\n 'vp9': 'V_VP9',\n 'av1': 'V_AV1',\n 'aac': 'A_AAC',\n 'mp3': 'A_MPEG/L3',\n 'opus': 'A_OPUS',\n 'vorbis': 'A_VORBIS',\n 'flac': 'A_FLAC',\n 'ac3': 'A_AC3',\n 'eac3': 'A_EAC3',\n 'pcm-u8': 'A_PCM/INT/LIT',\n 'pcm-s16': 'A_PCM/INT/LIT',\n 'pcm-s16be': 'A_PCM/INT/BIG',\n 'pcm-s24': 'A_PCM/INT/LIT',\n 'pcm-s24be': 'A_PCM/INT/BIG',\n 'pcm-s32': 'A_PCM/INT/LIT',\n 
'pcm-s32be': 'A_PCM/INT/BIG',\n 'pcm-f32': 'A_PCM/FLOAT/IEEE',\n 'pcm-f64': 'A_PCM/FLOAT/IEEE',\n 'webvtt': 'S_TEXT/WEBVTT',\n};\nexport function assertDefinedSize(size) {\n if (size === undefined) {\n throw new Error('Undefined element size is used in a place where it is not supported.');\n }\n}\n;\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport const buildMatroskaMimeType = (info) => {\n const base = info.hasVideo\n ? 'video/'\n : info.hasAudio\n ? 'audio/'\n : 'application/';\n let string = base + (info.isWebM ? 'webm' : 'x-matroska');\n if (info.codecStrings.length > 0) {\n const uniqueCodecMimeTypes = [...new Set(info.codecStrings.filter(Boolean))];\n string += `; codecs=\"${uniqueCodecMimeTypes.join(', ')}\"`;\n }\n return string;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { parsePcmCodec, PCM_AUDIO_CODECS } from './codec.js';\nimport { concatAvcNalUnits, deserializeAvcDecoderConfigurationRecord, determineVideoPacketType, extractNalUnitTypeForAvc, extractNalUnitTypeForHevc, HevcNalUnitType, iterateAvcNalUnits, iterateHevcNalUnits, parseAvcSps, } from './codec-data.js';\nimport { customVideoDecoders, customAudioDecoders } from './custom-coder.js';\nimport { InputDisposedError } from './input.js';\nimport { InputAudioTrack, InputTrack, InputVideoTrack } from './input-track.js';\nimport { assert, assertNever, CallSerializer, getInt24, getUint24, insertSorted, isChromium, isFirefox, isNumber, isWebKit, last, mapAsyncGenerator, promiseWithResolvers, toAsyncIterator, toDataView, toUint8Array, validateAnyIterable, } from './misc.js';\nimport { EncodedPacket } from './packet.js';\nimport { fromAlaw, fromUlaw } from './pcm.js';\nimport { AudioSample, clampCropRectangle, validateCropRectangle, VideoSample } from './sample.js';\nconst validatePacketRetrievalOptions = (options) => {\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.metadataOnly !== undefined && typeof options.metadataOnly !== 'boolean') {\n throw new TypeError('options.metadataOnly, when defined, must be a boolean.');\n }\n if (options.verifyKeyPackets !== undefined && typeof options.verifyKeyPackets !== 'boolean') {\n throw new TypeError('options.verifyKeyPackets, when defined, must be a boolean.');\n }\n if (options.verifyKeyPackets && options.metadataOnly) {\n throw new TypeError('options.verifyKeyPackets and options.metadataOnly cannot be enabled together.');\n }\n};\nconst validateTimestamp = (timestamp) => {\n if (!isNumber(timestamp)) {\n throw new TypeError('timestamp must be a number.'); // It can be non-finite, that's fine\n }\n};\nconst maybeFixPacketType = (track, promise, 
options) => {\n if (options.verifyKeyPackets) {\n return promise.then(async (packet) => {\n if (!packet || packet.type === 'delta') {\n return packet;\n }\n const determinedType = await track.determinePacketType(packet);\n if (determinedType) {\n // @ts-expect-error Technically readonly\n packet.type = determinedType;\n }\n return packet;\n });\n }\n else {\n return promise;\n }\n};\n/**\n * Sink for retrieving encoded packets from an input track.\n * @group Media sinks\n * @public\n */\nexport class EncodedPacketSink {\n /** Creates a new {@link EncodedPacketSink} for the given {@link InputTrack}. */\n constructor(track) {\n if (!(track instanceof InputTrack)) {\n throw new TypeError('track must be an InputTrack.');\n }\n this._track = track;\n }\n /**\n * Retrieves the track's first packet (in decode order), or null if it has no packets. The first packet is very\n * likely to be a key packet.\n */\n getFirstPacket(options = {}) {\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n return maybeFixPacketType(this._track, this._track._backing.getFirstPacket(options), options);\n }\n /**\n * Retrieves the packet corresponding to the given timestamp, in seconds. More specifically, returns the last packet\n * (in presentation order) with a start timestamp less than or equal to the given timestamp. This method can be\n * used to retrieve a track's last packet using `getPacket(Infinity)`. 
The method returns null if the timestamp\n * is before the first packet in the track.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n getPacket(timestamp, options = {}) {\n validateTimestamp(timestamp);\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n return maybeFixPacketType(this._track, this._track._backing.getPacket(timestamp, options), options);\n }\n /**\n * Retrieves the packet following the given packet (in decode order), or null if the given packet is the\n * last packet.\n */\n getNextPacket(packet, options = {}) {\n if (!(packet instanceof EncodedPacket)) {\n throw new TypeError('packet must be an EncodedPacket.');\n }\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n return maybeFixPacketType(this._track, this._track._backing.getNextPacket(packet, options), options);\n }\n /**\n * Retrieves the key packet corresponding to the given timestamp, in seconds. More specifically, returns the last\n * key packet (in presentation order) with a start timestamp less than or equal to the given timestamp. A key packet\n * is a packet that doesn't require previous packets to be decoded. This method can be used to retrieve a track's\n * last key packet using `getKeyPacket(Infinity)`. 
The method returns null if the timestamp is before the first\n * key packet in the track.\n *\n * To ensure that the returned packet is guaranteed to be a real key frame, enable `options.verifyKeyPackets`.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n async getKeyPacket(timestamp, options = {}) {\n validateTimestamp(timestamp);\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n if (!options.verifyKeyPackets) {\n return this._track._backing.getKeyPacket(timestamp, options);\n }\n const packet = await this._track._backing.getKeyPacket(timestamp, options);\n if (!packet) {\n return packet;\n }\n assert(packet.type === 'key');\n const determinedType = await this._track.determinePacketType(packet);\n if (determinedType === 'delta') {\n // Try returning the previous key packet (in hopes that it's actually a key packet)\n return this.getKeyPacket(packet.timestamp - 1 / this._track.timeResolution, options);\n }\n return packet;\n }\n /**\n * Retrieves the key packet following the given packet (in decode order), or null if the given packet is the last\n * key packet.\n *\n * To ensure that the returned packet is guaranteed to be a real key frame, enable `options.verifyKeyPackets`.\n */\n async getNextKeyPacket(packet, options = {}) {\n if (!(packet instanceof EncodedPacket)) {\n throw new TypeError('packet must be an EncodedPacket.');\n }\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n if (!options.verifyKeyPackets) {\n return this._track._backing.getNextKeyPacket(packet, options);\n }\n const nextPacket = await this._track._backing.getNextKeyPacket(packet, options);\n if (!nextPacket) {\n return nextPacket;\n }\n assert(nextPacket.type === 'key');\n const determinedType = await this._track.determinePacketType(nextPacket);\n if (determinedType === 'delta') {\n // Try returning the next key packet (in 
hopes that it's actually a key packet)\n return this.getNextKeyPacket(nextPacket, options);\n }\n return nextPacket;\n }\n /**\n * Creates an async iterator that yields the packets in this track in decode order. To enable fast iteration, this\n * method will intelligently preload packets based on the speed of the consumer.\n *\n * @param startPacket - (optional) The packet from which iteration should begin. This packet will also be yielded.\n * @param endTimestamp - (optional) The timestamp at which iteration should end. This packet will _not_ be yielded.\n */\n packets(startPacket, endPacket, options = {}) {\n if (startPacket !== undefined && !(startPacket instanceof EncodedPacket)) {\n throw new TypeError('startPacket must be an EncodedPacket.');\n }\n if (startPacket !== undefined && startPacket.isMetadataOnly && !options?.metadataOnly) {\n throw new TypeError('startPacket can only be metadata-only if options.metadataOnly is enabled.');\n }\n if (endPacket !== undefined && !(endPacket instanceof EncodedPacket)) {\n throw new TypeError('endPacket must be an EncodedPacket.');\n }\n validatePacketRetrievalOptions(options);\n if (this._track.input._disposed) {\n throw new InputDisposedError();\n }\n const packetQueue = [];\n let { promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers();\n let { promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers();\n let ended = false;\n let terminated = false;\n // This stores errors that are \"out of band\" in the sense that they didn't occur in the normal flow of this\n // method but instead in a different context. 
This error should not go unnoticed and must be bubbled up to\n // the consumer.\n let outOfBandError = null;\n const timestamps = [];\n // The queue should always be big enough to hold 1 second worth of packets\n const maxQueueSize = () => Math.max(2, timestamps.length);\n // The following is the \"pump\" process that keeps pumping packets into the queue\n (async () => {\n let packet = startPacket ?? await this.getFirstPacket(options);\n while (packet && !terminated && !this._track.input._disposed) {\n if (endPacket && packet.sequenceNumber >= endPacket?.sequenceNumber) {\n break;\n }\n if (packetQueue.length > maxQueueSize()) {\n ({ promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers());\n await queueDequeue;\n continue;\n }\n packetQueue.push(packet);\n onQueueNotEmpty();\n ({ promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers());\n packet = await this.getNextPacket(packet, options);\n }\n ended = true;\n onQueueNotEmpty();\n })().catch((error) => {\n if (!outOfBandError) {\n outOfBandError = error;\n onQueueNotEmpty();\n }\n });\n const track = this._track;\n return {\n async next() {\n while (true) {\n if (track.input._disposed) {\n throw new InputDisposedError();\n }\n else if (terminated) {\n return { value: undefined, done: true };\n }\n else if (outOfBandError) {\n throw outOfBandError;\n }\n else if (packetQueue.length > 0) {\n const value = packetQueue.shift();\n const now = performance.now();\n timestamps.push(now);\n while (timestamps.length > 0 && now - timestamps[0] >= 1000) {\n timestamps.shift();\n }\n onQueueDequeue();\n return { value, done: false };\n }\n else if (ended) {\n return { value: undefined, done: true };\n }\n else {\n await queueNotEmpty;\n }\n }\n },\n async return() {\n terminated = true;\n onQueueDequeue();\n onQueueNotEmpty();\n return { value: undefined, done: true };\n },\n async throw(error) {\n throw error;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n }\n}\nclass 
DecoderWrapper {\n constructor(onSample, onError) {\n this.onSample = onSample;\n this.onError = onError;\n }\n}\n/**\n * Base class for decoded media sample sinks.\n * @group Media sinks\n * @public\n */\nexport class BaseMediaSampleSink {\n /** @internal */\n mediaSamplesInRange(startTimestamp = 0, endTimestamp = Infinity) {\n validateTimestamp(startTimestamp);\n validateTimestamp(endTimestamp);\n const sampleQueue = [];\n let firstSampleQueued = false;\n let lastSample = null;\n let { promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers();\n let { promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers();\n let decoderIsFlushed = false;\n let ended = false;\n let terminated = false;\n // This stores errors that are \"out of band\" in the sense that they didn't occur in the normal flow of this\n // method but instead in a different context. This error should not go unnoticed and must be bubbled up to\n // the consumer.\n let outOfBandError = null;\n // The following is the \"pump\" process that keeps pumping packets into the decoder\n (async () => {\n const decoder = await this._createDecoder((sample) => {\n onQueueDequeue();\n if (sample.timestamp >= endTimestamp) {\n ended = true;\n }\n if (ended) {\n sample.close();\n return;\n }\n if (lastSample) {\n if (sample.timestamp > startTimestamp) {\n // We don't know ahead of time what the first first is. This is because the first first is the\n // last first whose timestamp is less than or equal to the start timestamp. Therefore we need to\n // wait for the first first after the start timestamp, and then we'll know that the previous\n // first was the first first.\n sampleQueue.push(lastSample);\n firstSampleQueued = true;\n }\n else {\n lastSample.close();\n }\n }\n if (sample.timestamp >= startTimestamp) {\n sampleQueue.push(sample);\n firstSampleQueued = true;\n }\n lastSample = firstSampleQueued ? 
null : sample;\n if (sampleQueue.length > 0) {\n onQueueNotEmpty();\n ({ promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers());\n }\n }, (error) => {\n if (!outOfBandError) {\n outOfBandError = error;\n onQueueNotEmpty();\n }\n });\n const packetSink = this._createPacketSink();\n const keyPacket = await packetSink.getKeyPacket(startTimestamp, { verifyKeyPackets: true })\n ?? await packetSink.getFirstPacket();\n let currentPacket = keyPacket;\n // B-frames make it exceedingly difficult to properly define an upper bound for packet iteration if an end\n // timestamp is set, so we just don't do it. The case that makes it especially tricky is when the frames\n // following a key frame have a lower timestamp than the keyframe; something that quite frequently happens\n // in HEVC streams. The price to pay for not upper-bounding the packet iterator is a slight increase in\n // decoder work at the end of the range, but the added correctness and reliability makes this tradeoff worth\n // it.\n const endPacket = undefined;\n const packets = packetSink.packets(keyPacket ?? 
undefined, endPacket);\n await packets.next(); // Skip the start packet as we already have it\n while (currentPacket && !ended && !this._track.input._disposed) {\n const maxQueueSize = computeMaxQueueSize(sampleQueue.length);\n if (sampleQueue.length + decoder.getDecodeQueueSize() > maxQueueSize) {\n ({ promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers());\n await queueDequeue;\n continue;\n }\n decoder.decode(currentPacket);\n const packetResult = await packets.next();\n if (packetResult.done) {\n break;\n }\n currentPacket = packetResult.value;\n }\n await packets.return();\n if (!terminated && !this._track.input._disposed) {\n await decoder.flush();\n }\n decoder.close();\n if (!firstSampleQueued && lastSample) {\n sampleQueue.push(lastSample);\n }\n decoderIsFlushed = true;\n onQueueNotEmpty(); // To unstuck the generator\n })().catch((error) => {\n if (!outOfBandError) {\n outOfBandError = error;\n onQueueNotEmpty();\n }\n });\n const track = this._track;\n const closeSamples = () => {\n lastSample?.close();\n for (const sample of sampleQueue) {\n sample.close();\n }\n };\n return {\n async next() {\n while (true) {\n if (track.input._disposed) {\n closeSamples();\n throw new InputDisposedError();\n }\n else if (terminated) {\n return { value: undefined, done: true };\n }\n else if (outOfBandError) {\n closeSamples();\n throw outOfBandError;\n }\n else if (sampleQueue.length > 0) {\n const value = sampleQueue.shift();\n onQueueDequeue();\n return { value, done: false };\n }\n else if (!decoderIsFlushed) {\n await queueNotEmpty;\n }\n else {\n return { value: undefined, done: true };\n }\n }\n },\n async return() {\n terminated = true;\n ended = true;\n onQueueDequeue();\n onQueueNotEmpty();\n closeSamples();\n return { value: undefined, done: true };\n },\n async throw(error) {\n throw error;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n }\n /** @internal */\n mediaSamplesAtTimestamps(timestamps) {\n 
validateAnyIterable(timestamps);\n const timestampIterator = toAsyncIterator(timestamps);\n const timestampsOfInterest = [];\n const sampleQueue = [];\n let { promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers();\n let { promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers();\n let decoderIsFlushed = false;\n let terminated = false;\n // This stores errors that are \"out of band\" in the sense that they didn't occur in the normal flow of this\n // method but instead in a different context. This error should not go unnoticed and must be bubbled up to\n // the consumer.\n let outOfBandError = null;\n const pushToQueue = (sample) => {\n sampleQueue.push(sample);\n onQueueNotEmpty();\n ({ promise: queueNotEmpty, resolve: onQueueNotEmpty } = promiseWithResolvers());\n };\n // The following is the \"pump\" process that keeps pumping packets into the decoder\n (async () => {\n const decoder = await this._createDecoder((sample) => {\n onQueueDequeue();\n if (terminated) {\n sample.close();\n return;\n }\n let sampleUses = 0;\n while (timestampsOfInterest.length > 0\n && sample.timestamp - timestampsOfInterest[0] > -1e-10 // Give it a little epsilon\n ) {\n sampleUses++;\n timestampsOfInterest.shift();\n }\n if (sampleUses > 0) {\n for (let i = 0; i < sampleUses; i++) {\n // Clone the sample if we need to emit it multiple times\n pushToQueue((i < sampleUses - 1 ? sample.clone() : sample));\n }\n }\n else {\n sample.close();\n }\n }, (error) => {\n if (!outOfBandError) {\n outOfBandError = error;\n onQueueNotEmpty();\n }\n });\n const packetSink = this._createPacketSink();\n let lastPacket = null;\n let lastKeyPacket = null;\n // The end sequence number (inclusive) in the next batch of packets that will be decoded. 
The batch starts\n // at the last key frame and goes until this sequence number.\n let maxSequenceNumber = -1;\n const decodePackets = async () => {\n assert(lastKeyPacket);\n // Start at the current key packet\n let currentPacket = lastKeyPacket;\n decoder.decode(currentPacket);\n while (currentPacket.sequenceNumber < maxSequenceNumber) {\n const maxQueueSize = computeMaxQueueSize(sampleQueue.length);\n while (sampleQueue.length + decoder.getDecodeQueueSize() > maxQueueSize && !terminated) {\n ({ promise: queueDequeue, resolve: onQueueDequeue } = promiseWithResolvers());\n await queueDequeue;\n }\n if (terminated) {\n break;\n }\n const nextPacket = await packetSink.getNextPacket(currentPacket);\n assert(nextPacket);\n decoder.decode(nextPacket);\n currentPacket = nextPacket;\n }\n maxSequenceNumber = -1;\n };\n const flushDecoder = async () => {\n await decoder.flush();\n // We don't expect this list to have any elements in it anymore, but in case it does, let's emit\n // nulls for every remaining element, then clear it.\n for (let i = 0; i < timestampsOfInterest.length; i++) {\n pushToQueue(null);\n }\n timestampsOfInterest.length = 0;\n };\n for await (const timestamp of timestampIterator) {\n validateTimestamp(timestamp);\n if (terminated || this._track.input._disposed) {\n break;\n }\n const targetPacket = await packetSink.getPacket(timestamp);\n const keyPacket = targetPacket && await packetSink.getKeyPacket(timestamp, { verifyKeyPackets: true });\n if (!keyPacket) {\n if (maxSequenceNumber !== -1) {\n await decodePackets();\n await flushDecoder();\n }\n pushToQueue(null);\n lastPacket = null;\n continue;\n }\n // Check if the key packet has changed or if we're going back in time\n if (lastPacket\n && (keyPacket.sequenceNumber !== lastKeyPacket.sequenceNumber\n || targetPacket.timestamp < lastPacket.timestamp)) {\n await decodePackets();\n await flushDecoder(); // Always flush here, improves decoder compatibility\n }\n 
timestampsOfInterest.push(targetPacket.timestamp);\n maxSequenceNumber = Math.max(targetPacket.sequenceNumber, maxSequenceNumber);\n lastPacket = targetPacket;\n lastKeyPacket = keyPacket;\n }\n if (!terminated && !this._track.input._disposed) {\n if (maxSequenceNumber !== -1) {\n // We still need to decode packets\n await decodePackets();\n }\n await flushDecoder();\n }\n decoder.close();\n decoderIsFlushed = true;\n onQueueNotEmpty(); // To unstuck the generator\n })().catch((error) => {\n if (!outOfBandError) {\n outOfBandError = error;\n onQueueNotEmpty();\n }\n });\n const track = this._track;\n const closeSamples = () => {\n for (const sample of sampleQueue) {\n sample?.close();\n }\n };\n return {\n async next() {\n while (true) {\n if (track.input._disposed) {\n closeSamples();\n throw new InputDisposedError();\n }\n else if (terminated) {\n return { value: undefined, done: true };\n }\n else if (outOfBandError) {\n closeSamples();\n throw outOfBandError;\n }\n else if (sampleQueue.length > 0) {\n const value = sampleQueue.shift();\n assert(value !== undefined);\n onQueueDequeue();\n return { value, done: false };\n }\n else if (!decoderIsFlushed) {\n await queueNotEmpty;\n }\n else {\n return { value: undefined, done: true };\n }\n }\n },\n async return() {\n terminated = true;\n onQueueDequeue();\n onQueueNotEmpty();\n closeSamples();\n return { value: undefined, done: true };\n },\n async throw(error) {\n throw error;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n }\n}\nconst computeMaxQueueSize = (decodedSampleQueueSize) => {\n // If we have decoded samples lying around, limit the total queue size to a small value (decoded samples can use up\n // a lot of memory). If not, we're fine with a much bigger queue of encoded packets waiting to be decoded. In fact,\n // some decoders only start flushing out decoded chunks when the packet queue is large enough.\n return decodedSampleQueueSize === 0 ? 
40 : 8;\n};\nclass VideoDecoderWrapper extends DecoderWrapper {\n constructor(onSample, onError, codec, decoderConfig, rotation, timeResolution) {\n super(onSample, onError);\n this.codec = codec;\n this.decoderConfig = decoderConfig;\n this.rotation = rotation;\n this.timeResolution = timeResolution;\n this.decoder = null;\n this.customDecoder = null;\n this.customDecoderCallSerializer = new CallSerializer();\n this.customDecoderQueueSize = 0;\n this.inputTimestamps = []; // Timestamps input into the decoder, sorted.\n this.sampleQueue = []; // Safari-specific thing, check usage.\n this.currentPacketIndex = 0;\n this.raslSkipped = false; // For HEVC stuff\n // Alpha stuff\n this.alphaDecoder = null;\n this.alphaHadKeyframe = false;\n this.colorQueue = [];\n this.alphaQueue = [];\n this.merger = null;\n this.mergerCreationFailed = false;\n this.decodedAlphaChunkCount = 0;\n this.alphaDecoderQueueSize = 0;\n /** Each value is the number of decoded alpha chunks at which a null alpha frame should be added. 
*/\n this.nullAlphaFrameQueue = [];\n this.currentAlphaPacketIndex = 0;\n this.alphaRaslSkipped = false; // For HEVC stuff\n const MatchingCustomDecoder = customVideoDecoders.find(x => x.supports(codec, decoderConfig));\n if (MatchingCustomDecoder) {\n // @ts-expect-error \"Can't create instance of abstract class 🤓\"\n this.customDecoder = new MatchingCustomDecoder();\n // @ts-expect-error It's technically readonly\n this.customDecoder.codec = codec;\n // @ts-expect-error It's technically readonly\n this.customDecoder.config = decoderConfig;\n // @ts-expect-error It's technically readonly\n this.customDecoder.onSample = (sample) => {\n if (!(sample instanceof VideoSample)) {\n throw new TypeError('The argument passed to onSample must be a VideoSample.');\n }\n this.finalizeAndEmitSample(sample);\n };\n void this.customDecoderCallSerializer.call(() => this.customDecoder.init());\n }\n else {\n const colorHandler = (frame) => {\n if (this.alphaQueue.length > 0) {\n // Even when no alpha data is present (most of the time), there will be nulls in this queue\n const alphaFrame = this.alphaQueue.shift();\n assert(alphaFrame !== undefined);\n this.mergeAlpha(frame, alphaFrame);\n }\n else {\n this.colorQueue.push(frame);\n }\n };\n if (codec === 'avc' && this.decoderConfig.description && isChromium()) {\n // Chromium has/had a bug with playing interlaced AVC (https://issues.chromium.org/issues/456919096)\n // which can be worked around by requesting that software decoding be used. 
So, here we peek into the\n // AVC description, if present, and switch to software decoding if we find interlaced content.\n const record = deserializeAvcDecoderConfigurationRecord(toUint8Array(this.decoderConfig.description));\n if (record && record.sequenceParameterSets.length > 0) {\n const sps = parseAvcSps(record.sequenceParameterSets[0]);\n if (sps && sps.frameMbsOnlyFlag === 0) {\n this.decoderConfig = {\n ...this.decoderConfig,\n hardwareAcceleration: 'prefer-software',\n };\n }\n }\n }\n const stack = new Error('Decoding error').stack;\n this.decoder = new VideoDecoder({\n output: (frame) => {\n try {\n colorHandler(frame);\n }\n catch (error) {\n this.onError(error);\n }\n },\n error: (error) => {\n error.stack = stack; // Provide a more useful stack trace, the default one sucks\n this.onError(error);\n },\n });\n this.decoder.configure(this.decoderConfig);\n }\n }\n getDecodeQueueSize() {\n if (this.customDecoder) {\n return this.customDecoderQueueSize;\n }\n else {\n assert(this.decoder);\n return Math.max(this.decoder.decodeQueueSize, this.alphaDecoder?.decodeQueueSize ?? 
0);\n }\n }\n decode(packet) {\n if (this.codec === 'hevc' && this.currentPacketIndex > 0 && !this.raslSkipped) {\n if (this.hasHevcRaslPicture(packet.data)) {\n return; // Drop\n }\n this.raslSkipped = true;\n }\n if (this.customDecoder) {\n this.customDecoderQueueSize++;\n void this.customDecoderCallSerializer\n .call(() => this.customDecoder.decode(packet))\n .then(() => this.customDecoderQueueSize--);\n }\n else {\n assert(this.decoder);\n if (!isWebKit()) {\n insertSorted(this.inputTimestamps, packet.timestamp, x => x);\n }\n // Workaround for https://issues.chromium.org/issues/470109459\n if (isChromium() && this.currentPacketIndex === 0 && this.codec === 'avc') {\n const filteredNalUnits = [];\n for (const loc of iterateAvcNalUnits(packet.data, this.decoderConfig)) {\n const type = extractNalUnitTypeForAvc(packet.data[loc.offset]);\n // These trip up Chromium's key frame detection, so let's strip them\n if (!(type >= 20 && type <= 31)) {\n filteredNalUnits.push(packet.data.subarray(loc.offset, loc.offset + loc.length));\n }\n }\n const newData = concatAvcNalUnits(filteredNalUnits, this.decoderConfig);\n packet = new EncodedPacket(newData, packet.type, packet.timestamp, packet.duration);\n }\n this.decoder.decode(packet.toEncodedVideoChunk());\n this.decodeAlphaData(packet);\n }\n this.currentPacketIndex++;\n }\n decodeAlphaData(packet) {\n if (!packet.sideData.alpha || this.mergerCreationFailed) {\n // No alpha side data in the packet, most common case\n this.pushNullAlphaFrame();\n return;\n }\n if (!this.merger) {\n try {\n this.merger = new ColorAlphaMerger();\n }\n catch (error) {\n console.error('Due to an error, only color data will be decoded.', error);\n this.mergerCreationFailed = true;\n this.decodeAlphaData(packet); // Go again\n return;\n }\n }\n // Check if we need to set up the alpha decoder\n if (!this.alphaDecoder) {\n const alphaHandler = (frame) => {\n this.alphaDecoderQueueSize--;\n if (this.colorQueue.length > 0) {\n const colorFrame = 
this.colorQueue.shift();\n assert(colorFrame !== undefined);\n this.mergeAlpha(colorFrame, frame);\n }\n else {\n this.alphaQueue.push(frame);\n }\n // Check if any null frames have been queued for this point\n this.decodedAlphaChunkCount++;\n while (this.nullAlphaFrameQueue.length > 0\n && this.nullAlphaFrameQueue[0] === this.decodedAlphaChunkCount) {\n this.nullAlphaFrameQueue.shift();\n if (this.colorQueue.length > 0) {\n const colorFrame = this.colorQueue.shift();\n assert(colorFrame !== undefined);\n this.mergeAlpha(colorFrame, null);\n }\n else {\n this.alphaQueue.push(null);\n }\n }\n };\n const stack = new Error('Decoding error').stack;\n this.alphaDecoder = new VideoDecoder({\n output: (frame) => {\n try {\n alphaHandler(frame);\n }\n catch (error) {\n this.onError(error);\n }\n },\n error: (error) => {\n error.stack = stack; // Provide a more useful stack trace, the default one sucks\n this.onError(error);\n },\n });\n this.alphaDecoder.configure(this.decoderConfig);\n }\n const type = determineVideoPacketType(this.codec, this.decoderConfig, packet.sideData.alpha);\n // Alpha packets might follow a different key frame rhythm than the main packets. Therefore, before we start\n // decoding, we must first find a packet that's actually a key frame. Until then, we treat the image as opaque.\n if (!this.alphaHadKeyframe) {\n this.alphaHadKeyframe = type === 'key';\n }\n if (this.alphaHadKeyframe) {\n // Same RASL skipping logic as for color, unlikely to be hit (since who uses HEVC with separate alpha??) but\n // here for symmetry.\n if (this.codec === 'hevc' && this.currentAlphaPacketIndex > 0 && !this.alphaRaslSkipped) {\n if (this.hasHevcRaslPicture(packet.sideData.alpha)) {\n this.pushNullAlphaFrame();\n return;\n }\n this.alphaRaslSkipped = true;\n }\n this.currentAlphaPacketIndex++;\n this.alphaDecoder.decode(packet.alphaToEncodedVideoChunk(type ?? 
packet.type));\n this.alphaDecoderQueueSize++;\n }\n else {\n this.pushNullAlphaFrame();\n }\n }\n pushNullAlphaFrame() {\n if (this.alphaDecoderQueueSize === 0) {\n // Easy\n this.alphaQueue.push(null);\n }\n else {\n // There are still alpha chunks being decoded, so pushing `null` immediately would result in out-of-order\n // data and be incorrect. Instead, we need to enqueue a \"null frame\" for when the current decoder workload\n // has finished.\n this.nullAlphaFrameQueue.push(this.decodedAlphaChunkCount + this.alphaDecoderQueueSize);\n }\n }\n /**\n * If we're using HEVC, we need to make sure to skip any RASL slices that follow a non-IDR key frame such as\n * CRA_NUT. This is because RASL slices cannot be decoded without data before the CRA_NUT. Browsers behave\n * differently here: Chromium drops the packets, Safari throws a decoder error. Either way, it's not good\n * and causes bugs upstream. So, let's take the dropping into our own hands.\n */\n hasHevcRaslPicture(packetData) {\n for (const loc of iterateHevcNalUnits(packetData, this.decoderConfig)) {\n const type = extractNalUnitTypeForHevc(packetData[loc.offset]);\n if (type === HevcNalUnitType.RASL_N || type === HevcNalUnitType.RASL_R) {\n return true;\n }\n }\n return false;\n }\n /** Handler for the WebCodecs VideoDecoder for ironing out browser differences. */\n sampleHandler(sample) {\n if (isWebKit()) {\n // For correct B-frame handling, we don't just hand over the frames directly but instead add them to\n // a queue, because we want to ensure frames are emitted in presentation order. We flush the queue\n // each time we receive a frame with a timestamp larger than the highest we've seen so far, as we\n // can sure that is not a B-frame. 
Typically, WebCodecs automatically guarantees that frames are\n // emitted in presentation order, but Safari doesn't always follow this rule.\n if (this.sampleQueue.length > 0 && (sample.timestamp >= last(this.sampleQueue).timestamp)) {\n for (const sample of this.sampleQueue) {\n this.finalizeAndEmitSample(sample);\n }\n this.sampleQueue.length = 0;\n }\n insertSorted(this.sampleQueue, sample, x => x.timestamp);\n }\n else {\n // Assign it the next earliest timestamp from the input. We do this because browsers, by spec, are\n // required to emit decoded frames in presentation order *while* retaining the timestamp of their\n // originating EncodedVideoChunk. For files with B-frames but no out-of-order timestamps (like a\n // missing ctts box, for example), this causes a mismatch. We therefore fix the timestamps and\n // ensure they are sorted by doing this.\n const timestamp = this.inputTimestamps.shift();\n // There's no way we'd have more decoded frames than encoded packets we passed in. Actually, the\n // correspondence should be 1:1.\n assert(timestamp !== undefined);\n sample.setTimestamp(timestamp);\n this.finalizeAndEmitSample(sample);\n }\n }\n finalizeAndEmitSample(sample) {\n // Round the timestamps to the time resolution\n sample.setTimestamp(Math.round(sample.timestamp * this.timeResolution) / this.timeResolution);\n sample.setDuration(Math.round(sample.duration * this.timeResolution) / this.timeResolution);\n sample.setRotation(this.rotation);\n this.onSample(sample);\n }\n mergeAlpha(color, alpha) {\n if (!alpha) {\n // Nothing needs to be merged\n const finalSample = new VideoSample(color);\n this.sampleHandler(finalSample);\n return;\n }\n assert(this.merger);\n this.merger.update(color, alpha);\n color.close();\n alpha.close();\n const finalFrame = new VideoFrame(this.merger.canvas, {\n timestamp: color.timestamp,\n duration: color.duration ?? 
undefined,\n });\n const finalSample = new VideoSample(finalFrame);\n this.sampleHandler(finalSample);\n }\n async flush() {\n if (this.customDecoder) {\n await this.customDecoderCallSerializer.call(() => this.customDecoder.flush());\n }\n else {\n assert(this.decoder);\n await Promise.all([\n this.decoder.flush(),\n this.alphaDecoder?.flush(),\n ]);\n this.colorQueue.forEach(x => x.close());\n this.colorQueue.length = 0;\n this.alphaQueue.forEach(x => x?.close());\n this.alphaQueue.length = 0;\n this.alphaHadKeyframe = false;\n this.decodedAlphaChunkCount = 0;\n this.alphaDecoderQueueSize = 0;\n this.nullAlphaFrameQueue.length = 0;\n this.currentAlphaPacketIndex = 0;\n this.alphaRaslSkipped = false;\n }\n if (isWebKit()) {\n for (const sample of this.sampleQueue) {\n this.finalizeAndEmitSample(sample);\n }\n this.sampleQueue.length = 0;\n }\n this.currentPacketIndex = 0;\n this.raslSkipped = false;\n }\n close() {\n if (this.customDecoder) {\n void this.customDecoderCallSerializer.call(() => this.customDecoder.close());\n }\n else {\n assert(this.decoder);\n this.decoder.close();\n this.alphaDecoder?.close();\n this.colorQueue.forEach(x => x.close());\n this.colorQueue.length = 0;\n this.alphaQueue.forEach(x => x?.close());\n this.alphaQueue.length = 0;\n this.merger?.close();\n }\n for (const sample of this.sampleQueue) {\n sample.close();\n }\n this.sampleQueue.length = 0;\n }\n}\n/** Utility class that merges together color and alpha information using simple WebGL 2 shaders. 
*/\nclass ColorAlphaMerger {\n constructor() {\n // Canvas will be resized later\n if (typeof OffscreenCanvas !== 'undefined') {\n // Prefer OffscreenCanvas for Worker environments\n this.canvas = new OffscreenCanvas(300, 150);\n }\n else {\n this.canvas = document.createElement('canvas');\n }\n const gl = this.canvas.getContext('webgl2', {\n premultipliedAlpha: false,\n }); // Casting because of some TypeScript weirdness\n if (!gl) {\n throw new Error('Couldn\\'t acquire WebGL 2 context.');\n }\n this.gl = gl;\n this.program = this.createProgram();\n this.vao = this.createVAO();\n this.colorTexture = this.createTexture();\n this.alphaTexture = this.createTexture();\n this.gl.useProgram(this.program);\n this.gl.uniform1i(this.gl.getUniformLocation(this.program, 'u_colorTexture'), 0);\n this.gl.uniform1i(this.gl.getUniformLocation(this.program, 'u_alphaTexture'), 1);\n }\n createProgram() {\n const vertexShader = this.createShader(this.gl.VERTEX_SHADER, `#version 300 es\n\t\t\tin vec2 a_position;\n\t\t\tin vec2 a_texCoord;\n\t\t\tout vec2 v_texCoord;\n\t\t\t\n\t\t\tvoid main() {\n\t\t\t\tgl_Position = vec4(a_position, 0.0, 1.0);\n\t\t\t\tv_texCoord = a_texCoord;\n\t\t\t}\n\t\t`);\n const fragmentShader = this.createShader(this.gl.FRAGMENT_SHADER, `#version 300 es\n\t\t\tprecision highp float;\n\t\t\t\n\t\t\tuniform sampler2D u_colorTexture;\n\t\t\tuniform sampler2D u_alphaTexture;\n\t\t\tin vec2 v_texCoord;\n\t\t\tout vec4 fragColor;\n\t\t\t\n\t\t\tvoid main() {\n\t\t\t\tvec3 color = texture(u_colorTexture, v_texCoord).rgb;\n\t\t\t\tfloat alpha = texture(u_alphaTexture, v_texCoord).r;\n\t\t\t\tfragColor = vec4(color, alpha);\n\t\t\t}\n\t\t`);\n const program = this.gl.createProgram();\n this.gl.attachShader(program, vertexShader);\n this.gl.attachShader(program, fragmentShader);\n this.gl.linkProgram(program);\n return program;\n }\n createShader(type, source) {\n const shader = this.gl.createShader(type);\n this.gl.shaderSource(shader, source);\n 
this.gl.compileShader(shader);\n return shader;\n }\n createVAO() {\n const vao = this.gl.createVertexArray();\n this.gl.bindVertexArray(vao);\n const vertices = new Float32Array([\n -1, -1, 0, 1,\n 1, -1, 1, 1,\n -1, 1, 0, 0,\n 1, 1, 1, 0,\n ]);\n const buffer = this.gl.createBuffer();\n this.gl.bindBuffer(this.gl.ARRAY_BUFFER, buffer);\n this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);\n const positionLocation = this.gl.getAttribLocation(this.program, 'a_position');\n const texCoordLocation = this.gl.getAttribLocation(this.program, 'a_texCoord');\n this.gl.enableVertexAttribArray(positionLocation);\n this.gl.vertexAttribPointer(positionLocation, 2, this.gl.FLOAT, false, 16, 0);\n this.gl.enableVertexAttribArray(texCoordLocation);\n this.gl.vertexAttribPointer(texCoordLocation, 2, this.gl.FLOAT, false, 16, 8);\n return vao;\n }\n createTexture() {\n const texture = this.gl.createTexture();\n this.gl.bindTexture(this.gl.TEXTURE_2D, texture);\n this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);\n this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);\n this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);\n this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);\n return texture;\n }\n update(color, alpha) {\n if (color.displayWidth !== this.canvas.width || color.displayHeight !== this.canvas.height) {\n this.canvas.width = color.displayWidth;\n this.canvas.height = color.displayHeight;\n }\n this.gl.activeTexture(this.gl.TEXTURE0);\n this.gl.bindTexture(this.gl.TEXTURE_2D, this.colorTexture);\n this.gl.texImage2D(this.gl.TEXTURE_2D, 0, this.gl.RGBA, this.gl.RGBA, this.gl.UNSIGNED_BYTE, color);\n this.gl.activeTexture(this.gl.TEXTURE1);\n this.gl.bindTexture(this.gl.TEXTURE_2D, this.alphaTexture);\n this.gl.texImage2D(this.gl.TEXTURE_2D, 0, this.gl.RGBA, this.gl.RGBA, this.gl.UNSIGNED_BYTE, alpha);\n 
this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);\n this.gl.clear(this.gl.COLOR_BUFFER_BIT);\n this.gl.bindVertexArray(this.vao);\n this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);\n }\n close() {\n this.gl.getExtension('WEBGL_lose_context')?.loseContext();\n this.gl = null;\n }\n}\n/**\n * A sink that retrieves decoded video samples (video frames) from a video track.\n * @group Media sinks\n * @public\n */\nexport class VideoSampleSink extends BaseMediaSampleSink {\n /** Creates a new {@link VideoSampleSink} for the given {@link InputVideoTrack}. */\n constructor(videoTrack) {\n if (!(videoTrack instanceof InputVideoTrack)) {\n throw new TypeError('videoTrack must be an InputVideoTrack.');\n }\n super();\n this._track = videoTrack;\n }\n /** @internal */\n async _createDecoder(onSample, onError) {\n if (!(await this._track.canDecode())) {\n throw new Error('This video track cannot be decoded by this browser. Make sure to check decodability before using'\n + ' a track.');\n }\n const codec = this._track.codec;\n const rotation = this._track.rotation;\n const decoderConfig = await this._track.getDecoderConfig();\n const timeResolution = this._track.timeResolution;\n assert(codec && decoderConfig);\n return new VideoDecoderWrapper(onSample, onError, codec, decoderConfig, rotation, timeResolution);\n }\n /** @internal */\n _createPacketSink() {\n return new EncodedPacketSink(this._track);\n }\n /**\n * Retrieves the video sample (frame) corresponding to the given timestamp, in seconds. 
More specifically, returns\n * the last video sample (in presentation order) with a start timestamp less than or equal to the given timestamp.\n * Returns null if the timestamp is before the track's first timestamp.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n async getSample(timestamp) {\n validateTimestamp(timestamp);\n for await (const sample of this.mediaSamplesAtTimestamps([timestamp])) {\n return sample;\n }\n throw new Error('Internal error: Iterator returned nothing.');\n }\n /**\n * Creates an async iterator that yields the video samples (frames) of this track in presentation order. This method\n * will intelligently pre-decode a few frames ahead to enable fast iteration.\n *\n * @param startTimestamp - The timestamp in seconds at which to start yielding samples (inclusive).\n * @param endTimestamp - The timestamp in seconds at which to stop yielding samples (exclusive).\n */\n samples(startTimestamp = 0, endTimestamp = Infinity) {\n return this.mediaSamplesInRange(startTimestamp, endTimestamp);\n }\n /**\n * Creates an async iterator that yields a video sample (frame) for each timestamp in the argument. This method\n * uses an optimized decoding pipeline if these timestamps are monotonically sorted, decoding each packet at most\n * once, and is therefore more efficient than manually getting the sample for every timestamp. The iterator may\n * yield null if no frame is available for a given timestamp.\n *\n * @param timestamps - An iterable or async iterable of timestamps in seconds.\n */\n samplesAtTimestamps(timestamps) {\n return this.mediaSamplesAtTimestamps(timestamps);\n }\n}\n/**\n * A sink that renders video samples (frames) of the given video track to canvases. 
This is often more useful than\n * directly retrieving frames, as it comes with common preprocessing steps such as resizing or applying rotation\n * metadata.\n *\n * This sink will yield `HTMLCanvasElement`s when in a DOM context, and `OffscreenCanvas`es otherwise.\n *\n * @group Media sinks\n * @public\n */\nexport class CanvasSink {\n /** Creates a new {@link CanvasSink} for the given {@link InputVideoTrack}. */\n constructor(videoTrack, options = {}) {\n /** @internal */\n this._nextCanvasIndex = 0;\n if (!(videoTrack instanceof InputVideoTrack)) {\n throw new TypeError('videoTrack must be an InputVideoTrack.');\n }\n if (options && typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.alpha !== undefined && typeof options.alpha !== 'boolean') {\n throw new TypeError('options.alpha, when provided, must be a boolean.');\n }\n if (options.width !== undefined && (!Number.isInteger(options.width) || options.width <= 0)) {\n throw new TypeError('options.width, when defined, must be a positive integer.');\n }\n if (options.height !== undefined && (!Number.isInteger(options.height) || options.height <= 0)) {\n throw new TypeError('options.height, when defined, must be a positive integer.');\n }\n if (options.fit !== undefined && !['fill', 'contain', 'cover'].includes(options.fit)) {\n throw new TypeError('options.fit, when provided, must be one of \"fill\", \"contain\", or \"cover\".');\n }\n if (options.width !== undefined\n && options.height !== undefined\n && options.fit === undefined) {\n throw new TypeError('When both options.width and options.height are provided, options.fit must also be provided.');\n }\n if (options.rotation !== undefined && ![0, 90, 180, 270].includes(options.rotation)) {\n throw new TypeError('options.rotation, when provided, must be 0, 90, 180 or 270.');\n }\n if (options.crop !== undefined) {\n validateCropRectangle(options.crop, 'options.');\n }\n if (options.poolSize !== undefined\n && 
(typeof options.poolSize !== 'number' || !Number.isInteger(options.poolSize) || options.poolSize < 0)) {\n throw new TypeError('poolSize must be a non-negative integer.');\n }\n const rotation = options.rotation ?? videoTrack.rotation;\n const [rotatedWidth, rotatedHeight] = rotation % 180 === 0\n ? [videoTrack.squarePixelWidth, videoTrack.squarePixelHeight]\n : [videoTrack.squarePixelHeight, videoTrack.squarePixelWidth];\n const crop = options.crop;\n if (crop) {\n clampCropRectangle(crop, rotatedWidth, rotatedHeight);\n }\n let [width, height] = crop\n ? [crop.width, crop.height]\n : [rotatedWidth, rotatedHeight];\n const originalAspectRatio = width / height;\n // If width and height aren't defined together, deduce the missing value using the aspect ratio\n if (options.width !== undefined && options.height === undefined) {\n width = options.width;\n height = Math.round(width / originalAspectRatio);\n }\n else if (options.width === undefined && options.height !== undefined) {\n height = options.height;\n width = Math.round(height * originalAspectRatio);\n }\n else if (options.width !== undefined && options.height !== undefined) {\n width = options.width;\n height = options.height;\n }\n this._videoTrack = videoTrack;\n this._alpha = options.alpha ?? false;\n this._width = width;\n this._height = height;\n this._rotation = rotation;\n this._crop = crop;\n this._fit = options.fit ?? 'fill';\n this._videoSampleSink = new VideoSampleSink(videoTrack);\n this._canvasPool = Array.from({ length: options.poolSize ?? 
0 }, () => null);\n }\n /** @internal */\n _videoSampleToWrappedCanvas(sample) {\n let canvas = this._canvasPool[this._nextCanvasIndex];\n let canvasIsNew = false;\n if (!canvas) {\n if (typeof document !== 'undefined') {\n // Prefer an HTMLCanvasElement\n canvas = document.createElement('canvas');\n canvas.width = this._width;\n canvas.height = this._height;\n }\n else {\n canvas = new OffscreenCanvas(this._width, this._height);\n }\n if (this._canvasPool.length > 0) {\n this._canvasPool[this._nextCanvasIndex] = canvas;\n }\n canvasIsNew = true;\n }\n if (this._canvasPool.length > 0) {\n this._nextCanvasIndex = (this._nextCanvasIndex + 1) % this._canvasPool.length;\n }\n const context = canvas.getContext('2d', {\n alpha: this._alpha || isFirefox(), // Firefox has VideoFrame glitches with opaque canvases\n });\n assert(context);\n context.resetTransform();\n if (!canvasIsNew) {\n if (!this._alpha && isFirefox()) {\n context.fillStyle = 'black';\n context.fillRect(0, 0, this._width, this._height);\n }\n else {\n context.clearRect(0, 0, this._width, this._height);\n }\n }\n sample.drawWithFit(context, {\n fit: this._fit,\n rotation: this._rotation,\n crop: this._crop,\n });\n const result = {\n canvas,\n timestamp: sample.timestamp,\n duration: sample.duration,\n };\n sample.close();\n return result;\n }\n /**\n * Retrieves a canvas with the video frame corresponding to the given timestamp, in seconds. More specifically,\n * returns the last video frame (in presentation order) with a start timestamp less than or equal to the given\n * timestamp. 
Returns null if the timestamp is before the track's first timestamp.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n async getCanvas(timestamp) {\n validateTimestamp(timestamp);\n const sample = await this._videoSampleSink.getSample(timestamp);\n return sample && this._videoSampleToWrappedCanvas(sample);\n }\n /**\n * Creates an async iterator that yields canvases with the video frames of this track in presentation order. This\n * method will intelligently pre-decode a few frames ahead to enable fast iteration.\n *\n * @param startTimestamp - The timestamp in seconds at which to start yielding canvases (inclusive).\n * @param endTimestamp - The timestamp in seconds at which to stop yielding canvases (exclusive).\n */\n canvases(startTimestamp = 0, endTimestamp = Infinity) {\n return mapAsyncGenerator(this._videoSampleSink.samples(startTimestamp, endTimestamp), sample => this._videoSampleToWrappedCanvas(sample));\n }\n /**\n * Creates an async iterator that yields a canvas for each timestamp in the argument. This method uses an optimized\n * decoding pipeline if these timestamps are monotonically sorted, decoding each packet at most once, and is\n * therefore more efficient than manually getting the canvas for every timestamp. 
The iterator may yield null if\n * no frame is available for a given timestamp.\n *\n * @param timestamps - An iterable or async iterable of timestamps in seconds.\n */\n canvasesAtTimestamps(timestamps) {\n return mapAsyncGenerator(this._videoSampleSink.samplesAtTimestamps(timestamps), sample => sample && this._videoSampleToWrappedCanvas(sample));\n }\n}\nclass AudioDecoderWrapper extends DecoderWrapper {\n constructor(onSample, onError, codec, decoderConfig) {\n super(onSample, onError);\n this.decoder = null;\n this.customDecoder = null;\n this.customDecoderCallSerializer = new CallSerializer();\n this.customDecoderQueueSize = 0;\n // Internal state to accumulate a precise current timestamp based on audio durations, not the (potentially\n // inaccurate) packet timestamps.\n this.currentTimestamp = null;\n const sampleHandler = (sample) => {\n if (this.currentTimestamp === null\n || Math.abs(sample.timestamp - this.currentTimestamp) >= sample.duration) {\n // We need to sync with the sample timestamp again\n this.currentTimestamp = sample.timestamp;\n }\n const preciseTimestamp = this.currentTimestamp;\n this.currentTimestamp += sample.duration;\n if (sample.numberOfFrames === 0) {\n // We skip zero-data (empty) AudioSamples. 
These are sometimes emitted, for example, by Firefox when it\n // decodes Vorbis (at the start).\n sample.close();\n return;\n }\n // Round the timestamp to the sample rate\n const sampleRate = decoderConfig.sampleRate;\n sample.setTimestamp(Math.round(preciseTimestamp * sampleRate) / sampleRate);\n onSample(sample);\n };\n const MatchingCustomDecoder = customAudioDecoders.find(x => x.supports(codec, decoderConfig));\n if (MatchingCustomDecoder) {\n // @ts-expect-error \"Can't create instance of abstract class 🤓\"\n this.customDecoder = new MatchingCustomDecoder();\n // @ts-expect-error It's technically readonly\n this.customDecoder.codec = codec;\n // @ts-expect-error It's technically readonly\n this.customDecoder.config = decoderConfig;\n // @ts-expect-error It's technically readonly\n this.customDecoder.onSample = (sample) => {\n if (!(sample instanceof AudioSample)) {\n throw new TypeError('The argument passed to onSample must be an AudioSample.');\n }\n sampleHandler(sample);\n };\n void this.customDecoderCallSerializer.call(() => this.customDecoder.init());\n }\n else {\n const stack = new Error('Decoding error').stack;\n this.decoder = new AudioDecoder({\n output: (data) => {\n try {\n sampleHandler(new AudioSample(data));\n }\n catch (error) {\n this.onError(error);\n }\n },\n error: (error) => {\n error.stack = stack; // Provide a more useful stack trace, the default one sucks\n this.onError(error);\n },\n });\n this.decoder.configure(decoderConfig);\n }\n }\n getDecodeQueueSize() {\n if (this.customDecoder) {\n return this.customDecoderQueueSize;\n }\n else {\n assert(this.decoder);\n return this.decoder.decodeQueueSize;\n }\n }\n decode(packet) {\n if (this.customDecoder) {\n this.customDecoderQueueSize++;\n void this.customDecoderCallSerializer\n .call(() => this.customDecoder.decode(packet))\n .then(() => this.customDecoderQueueSize--);\n }\n else {\n assert(this.decoder);\n this.decoder.decode(packet.toEncodedAudioChunk());\n }\n }\n flush() {\n if 
(this.customDecoder) {\n return this.customDecoderCallSerializer.call(() => this.customDecoder.flush());\n }\n else {\n assert(this.decoder);\n return this.decoder.flush();\n }\n }\n close() {\n if (this.customDecoder) {\n void this.customDecoderCallSerializer.call(() => this.customDecoder.close());\n }\n else {\n assert(this.decoder);\n this.decoder.close();\n }\n }\n}\n// There are a lot of PCM variants not natively supported by the browser and by AudioData. Therefore we need a simple\n// decoder that maps any input PCM format into a PCM format supported by the browser.\nclass PcmAudioDecoderWrapper extends DecoderWrapper {\n constructor(onSample, onError, decoderConfig) {\n super(onSample, onError);\n this.decoderConfig = decoderConfig;\n // Internal state to accumulate a precise current timestamp based on audio durations, not the (potentially\n // inaccurate) packet timestamps.\n this.currentTimestamp = null;\n assert(PCM_AUDIO_CODECS.includes(decoderConfig.codec));\n this.codec = decoderConfig.codec;\n const { dataType, sampleSize, littleEndian } = parsePcmCodec(this.codec);\n this.inputSampleSize = sampleSize;\n switch (sampleSize) {\n case 1:\n {\n if (dataType === 'unsigned') {\n this.readInputValue = (view, byteOffset) => view.getUint8(byteOffset) - 2 ** 7;\n }\n else if (dataType === 'signed') {\n this.readInputValue = (view, byteOffset) => view.getInt8(byteOffset);\n }\n else if (dataType === 'ulaw') {\n this.readInputValue = (view, byteOffset) => fromUlaw(view.getUint8(byteOffset));\n }\n else if (dataType === 'alaw') {\n this.readInputValue = (view, byteOffset) => fromAlaw(view.getUint8(byteOffset));\n }\n else {\n assert(false);\n }\n }\n ;\n break;\n case 2:\n {\n if (dataType === 'unsigned') {\n this.readInputValue = (view, byteOffset) => view.getUint16(byteOffset, littleEndian) - 2 ** 15;\n }\n else if (dataType === 'signed') {\n this.readInputValue = (view, byteOffset) => view.getInt16(byteOffset, littleEndian);\n }\n else {\n assert(false);\n }\n 
}\n ;\n break;\n case 3:\n {\n if (dataType === 'unsigned') {\n this.readInputValue = (view, byteOffset) => getUint24(view, byteOffset, littleEndian) - 2 ** 23;\n }\n else if (dataType === 'signed') {\n this.readInputValue = (view, byteOffset) => getInt24(view, byteOffset, littleEndian);\n }\n else {\n assert(false);\n }\n }\n ;\n break;\n case 4:\n {\n if (dataType === 'unsigned') {\n this.readInputValue = (view, byteOffset) => view.getUint32(byteOffset, littleEndian) - 2 ** 31;\n }\n else if (dataType === 'signed') {\n this.readInputValue = (view, byteOffset) => view.getInt32(byteOffset, littleEndian);\n }\n else if (dataType === 'float') {\n this.readInputValue = (view, byteOffset) => view.getFloat32(byteOffset, littleEndian);\n }\n else {\n assert(false);\n }\n }\n ;\n break;\n case 8:\n {\n if (dataType === 'float') {\n this.readInputValue = (view, byteOffset) => view.getFloat64(byteOffset, littleEndian);\n }\n else {\n assert(false);\n }\n }\n ;\n break;\n default:\n {\n assertNever(sampleSize);\n assert(false);\n }\n ;\n }\n switch (sampleSize) {\n case 1:\n {\n if (dataType === 'ulaw' || dataType === 'alaw') {\n this.outputSampleSize = 2;\n this.outputFormat = 's16';\n this.writeOutputValue = (view, byteOffset, value) => view.setInt16(byteOffset, value, true);\n }\n else {\n this.outputSampleSize = 1;\n this.outputFormat = 'u8';\n this.writeOutputValue = (view, byteOffset, value) => view.setUint8(byteOffset, value + 2 ** 7);\n }\n }\n ;\n break;\n case 2:\n {\n this.outputSampleSize = 2;\n this.outputFormat = 's16';\n this.writeOutputValue = (view, byteOffset, value) => view.setInt16(byteOffset, value, true);\n }\n ;\n break;\n case 3:\n {\n this.outputSampleSize = 4;\n this.outputFormat = 's32';\n // From https://www.w3.org/TR/webcodecs:\n // AudioData containing 24-bit samples SHOULD store those samples in s32 or f32. 
When samples are\n // stored in s32, each sample MUST be left-shifted by 8 bits.\n this.writeOutputValue = (view, byteOffset, value) => view.setInt32(byteOffset, value << 8, true);\n }\n ;\n break;\n case 4:\n {\n this.outputSampleSize = 4;\n if (dataType === 'float') {\n this.outputFormat = 'f32';\n this.writeOutputValue = (view, byteOffset, value) => view.setFloat32(byteOffset, value, true);\n }\n else {\n this.outputFormat = 's32';\n this.writeOutputValue = (view, byteOffset, value) => view.setInt32(byteOffset, value, true);\n }\n }\n ;\n break;\n case 8:\n {\n this.outputSampleSize = 4;\n this.outputFormat = 'f32';\n this.writeOutputValue = (view, byteOffset, value) => view.setFloat32(byteOffset, value, true);\n }\n ;\n break;\n default:\n {\n assertNever(sampleSize);\n assert(false);\n }\n ;\n }\n ;\n }\n getDecodeQueueSize() {\n return 0;\n }\n decode(packet) {\n const inputView = toDataView(packet.data);\n const numberOfFrames = packet.byteLength / this.decoderConfig.numberOfChannels / this.inputSampleSize;\n const outputBufferSize = numberOfFrames * this.decoderConfig.numberOfChannels * this.outputSampleSize;\n const outputBuffer = new ArrayBuffer(outputBufferSize);\n const outputView = new DataView(outputBuffer);\n for (let i = 0; i < numberOfFrames * this.decoderConfig.numberOfChannels; i++) {\n const inputIndex = i * this.inputSampleSize;\n const outputIndex = i * this.outputSampleSize;\n const value = this.readInputValue(inputView, inputIndex);\n this.writeOutputValue(outputView, outputIndex, value);\n }\n const preciseDuration = numberOfFrames / this.decoderConfig.sampleRate;\n if (this.currentTimestamp === null || Math.abs(packet.timestamp - this.currentTimestamp) >= preciseDuration) {\n // We need to sync with the packet timestamp again\n this.currentTimestamp = packet.timestamp;\n }\n const preciseTimestamp = this.currentTimestamp;\n this.currentTimestamp += preciseDuration;\n const audioSample = new AudioSample({\n format: this.outputFormat,\n 
data: outputBuffer,\n numberOfChannels: this.decoderConfig.numberOfChannels,\n sampleRate: this.decoderConfig.sampleRate,\n numberOfFrames,\n timestamp: preciseTimestamp,\n });\n this.onSample(audioSample);\n }\n async flush() {\n // Do nothing\n }\n close() {\n // Do nothing\n }\n}\n/**\n * Sink for retrieving decoded audio samples from an audio track.\n * @group Media sinks\n * @public\n */\nexport class AudioSampleSink extends BaseMediaSampleSink {\n /** Creates a new {@link AudioSampleSink} for the given {@link InputAudioTrack}. */\n constructor(audioTrack) {\n if (!(audioTrack instanceof InputAudioTrack)) {\n throw new TypeError('audioTrack must be an InputAudioTrack.');\n }\n super();\n this._track = audioTrack;\n }\n /** @internal */\n async _createDecoder(onSample, onError) {\n if (!(await this._track.canDecode())) {\n throw new Error('This audio track cannot be decoded by this browser. Make sure to check decodability before using'\n + ' a track.');\n }\n const codec = this._track.codec;\n const decoderConfig = await this._track.getDecoderConfig();\n assert(codec && decoderConfig);\n if (PCM_AUDIO_CODECS.includes(decoderConfig.codec)) {\n return new PcmAudioDecoderWrapper(onSample, onError, decoderConfig);\n }\n else {\n return new AudioDecoderWrapper(onSample, onError, codec, decoderConfig);\n }\n }\n /** @internal */\n _createPacketSink() {\n return new EncodedPacketSink(this._track);\n }\n /**\n * Retrieves the audio sample corresponding to the given timestamp, in seconds. 
More specifically, returns\n * the last audio sample (in presentation order) with a start timestamp less than or equal to the given timestamp.\n * Returns null if the timestamp is before the track's first timestamp.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n async getSample(timestamp) {\n validateTimestamp(timestamp);\n for await (const sample of this.mediaSamplesAtTimestamps([timestamp])) {\n return sample;\n }\n throw new Error('Internal error: Iterator returned nothing.');\n }\n /**\n * Creates an async iterator that yields the audio samples of this track in presentation order. This method\n * will intelligently pre-decode a few samples ahead to enable fast iteration.\n *\n * @param startTimestamp - The timestamp in seconds at which to start yielding samples (inclusive).\n * @param endTimestamp - The timestamp in seconds at which to stop yielding samples (exclusive).\n */\n samples(startTimestamp = 0, endTimestamp = Infinity) {\n return this.mediaSamplesInRange(startTimestamp, endTimestamp);\n }\n /**\n * Creates an async iterator that yields an audio sample for each timestamp in the argument. This method\n * uses an optimized decoding pipeline if these timestamps are monotonically sorted, decoding each packet at most\n * once, and is therefore more efficient than manually getting the sample for every timestamp. The iterator may\n * yield null if no sample is available for a given timestamp.\n *\n * @param timestamps - An iterable or async iterable of timestamps in seconds.\n */\n samplesAtTimestamps(timestamps) {\n return this.mediaSamplesAtTimestamps(timestamps);\n }\n}\n/**\n * A sink that retrieves decoded audio samples from an audio track and converts them to `AudioBuffer` instances. 
This is\n * often more useful than directly retrieving audio samples, as audio buffers can be directly used with the\n * Web Audio API.\n * @group Media sinks\n * @public\n */\nexport class AudioBufferSink {\n /** Creates a new {@link AudioBufferSink} for the given {@link InputAudioTrack}. */\n constructor(audioTrack) {\n if (!(audioTrack instanceof InputAudioTrack)) {\n throw new TypeError('audioTrack must be an InputAudioTrack.');\n }\n this._audioSampleSink = new AudioSampleSink(audioTrack);\n }\n /** @internal */\n _audioSampleToWrappedArrayBuffer(sample) {\n const result = {\n buffer: sample.toAudioBuffer(),\n timestamp: sample.timestamp,\n duration: sample.duration,\n };\n sample.close();\n return result;\n }\n /**\n * Retrieves the audio buffer corresponding to the given timestamp, in seconds. More specifically, returns\n * the last audio buffer (in presentation order) with a start timestamp less than or equal to the given timestamp.\n * Returns null if the timestamp is before the track's first timestamp.\n *\n * @param timestamp - The timestamp used for retrieval, in seconds.\n */\n async getBuffer(timestamp) {\n validateTimestamp(timestamp);\n const data = await this._audioSampleSink.getSample(timestamp);\n return data && this._audioSampleToWrappedArrayBuffer(data);\n }\n /**\n * Creates an async iterator that yields audio buffers of this track in presentation order. 
This method\n * will intelligently pre-decode a few buffers ahead to enable fast iteration.\n *\n * @param startTimestamp - The timestamp in seconds at which to start yielding buffers (inclusive).\n * @param endTimestamp - The timestamp in seconds at which to stop yielding buffers (exclusive).\n */\n buffers(startTimestamp = 0, endTimestamp = Infinity) {\n return mapAsyncGenerator(this._audioSampleSink.samples(startTimestamp, endTimestamp), data => this._audioSampleToWrappedArrayBuffer(data));\n }\n /**\n * Creates an async iterator that yields an audio buffer for each timestamp in the argument. This method\n * uses an optimized decoding pipeline if these timestamps are monotonically sorted, decoding each packet at most\n * once, and is therefore more efficient than manually getting the buffer for every timestamp. The iterator may\n * yield null if no buffer is available for a given timestamp.\n *\n * @param timestamps - An iterable or async iterable of timestamps in seconds.\n */\n buffersAtTimestamps(timestamps) {\n return mapAsyncGenerator(this._audioSampleSink.samplesAtTimestamps(timestamps), data => data && this._audioSampleToWrappedArrayBuffer(data));\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\n/**\n * Image data with additional metadata.\n *\n * @group Metadata tags\n * @public\n */\nexport class RichImageData {\n /** Creates a new {@link RichImageData}. */\n constructor(\n /** The raw image data. */\n data, \n /** An RFC 6838 MIME type (e.g. image/jpeg, image/png, etc.) 
*/\n mimeType) {\n this.data = data;\n this.mimeType = mimeType;\n if (!(data instanceof Uint8Array)) {\n throw new TypeError('data must be a Uint8Array.');\n }\n if (typeof mimeType !== 'string') {\n throw new TypeError('mimeType must be a string.');\n }\n }\n}\n/**\n * A file attached to a media file.\n *\n * @group Metadata tags\n * @public\n */\nexport class AttachedFile {\n /** Creates a new {@link AttachedFile}. */\n constructor(\n /** The raw file data. */\n data, \n /** An RFC 6838 MIME type (e.g. image/jpeg, image/png, font/ttf, etc.) */\n mimeType, \n /** The name of the file. */\n name, \n /** A description of the file. */\n description) {\n this.data = data;\n this.mimeType = mimeType;\n this.name = name;\n this.description = description;\n if (!(data instanceof Uint8Array)) {\n throw new TypeError('data must be a Uint8Array.');\n }\n if (mimeType !== undefined && typeof mimeType !== 'string') {\n throw new TypeError('mimeType, when provided, must be a string.');\n }\n if (name !== undefined && typeof name !== 'string') {\n throw new TypeError('name, when provided, must be a string.');\n }\n if (description !== undefined && typeof description !== 'string') {\n throw new TypeError('description, when provided, must be a string.');\n }\n }\n}\n;\nexport const validateMetadataTags = (tags) => {\n if (!tags || typeof tags !== 'object') {\n throw new TypeError('tags must be an object.');\n }\n if (tags.title !== undefined && typeof tags.title !== 'string') {\n throw new TypeError('tags.title, when provided, must be a string.');\n }\n if (tags.description !== undefined && typeof tags.description !== 'string') {\n throw new TypeError('tags.description, when provided, must be a string.');\n }\n if (tags.artist !== undefined && typeof tags.artist !== 'string') {\n throw new TypeError('tags.artist, when provided, must be a string.');\n }\n if (tags.album !== undefined && typeof tags.album !== 'string') {\n throw new TypeError('tags.album, when provided, must be a 
string.');\n }\n if (tags.albumArtist !== undefined && typeof tags.albumArtist !== 'string') {\n throw new TypeError('tags.albumArtist, when provided, must be a string.');\n }\n if (tags.trackNumber !== undefined && (!Number.isInteger(tags.trackNumber) || tags.trackNumber <= 0)) {\n throw new TypeError('tags.trackNumber, when provided, must be a positive integer.');\n }\n if (tags.tracksTotal !== undefined\n && (!Number.isInteger(tags.tracksTotal) || tags.tracksTotal <= 0)) {\n throw new TypeError('tags.tracksTotal, when provided, must be a positive integer.');\n }\n if (tags.discNumber !== undefined && (!Number.isInteger(tags.discNumber) || tags.discNumber <= 0)) {\n throw new TypeError('tags.discNumber, when provided, must be a positive integer.');\n }\n if (tags.discsTotal !== undefined\n && (!Number.isInteger(tags.discsTotal) || tags.discsTotal <= 0)) {\n throw new TypeError('tags.discsTotal, when provided, must be a positive integer.');\n }\n if (tags.genre !== undefined && typeof tags.genre !== 'string') {\n throw new TypeError('tags.genre, when provided, must be a string.');\n }\n if (tags.date !== undefined && (!(tags.date instanceof Date) || Number.isNaN(tags.date.getTime()))) {\n throw new TypeError('tags.date, when provided, must be a valid Date.');\n }\n if (tags.lyrics !== undefined && typeof tags.lyrics !== 'string') {\n throw new TypeError('tags.lyrics, when provided, must be a string.');\n }\n if (tags.images !== undefined) {\n if (!Array.isArray(tags.images)) {\n throw new TypeError('tags.images, when provided, must be an array.');\n }\n for (const image of tags.images) {\n if (!image || typeof image !== 'object') {\n throw new TypeError('Each image in tags.images must be an object.');\n }\n if (!(image.data instanceof Uint8Array)) {\n throw new TypeError('Each image.data must be a Uint8Array.');\n }\n if (typeof image.mimeType !== 'string') {\n throw new TypeError('Each image.mimeType must be a string.');\n }\n if (!['coverFront', 'coverBack', 
'unknown'].includes(image.kind)) {\n throw new TypeError('Each image.kind must be \\'coverFront\\', \\'coverBack\\', or \\'unknown\\'.');\n }\n }\n }\n if (tags.comment !== undefined && typeof tags.comment !== 'string') {\n throw new TypeError('tags.comment, when provided, must be a string.');\n }\n if (tags.raw !== undefined) {\n if (!tags.raw || typeof tags.raw !== 'object') {\n throw new TypeError('tags.raw, when provided, must be an object.');\n }\n for (const value of Object.values(tags.raw)) {\n if (value !== null\n && typeof value !== 'string'\n && !(value instanceof Uint8Array)\n && !(value instanceof RichImageData)\n && !(value instanceof AttachedFile)) {\n throw new TypeError('Each value in tags.raw must be a string, Uint8Array, RichImageData, AttachedFile, or null.');\n }\n }\n }\n};\nexport const metadataTagsAreEmpty = (tags) => {\n return tags.title === undefined\n && tags.description === undefined\n && tags.artist === undefined\n && tags.album === undefined\n && tags.albumArtist === undefined\n && tags.trackNumber === undefined\n && tags.tracksTotal === undefined\n && tags.discNumber === undefined\n && tags.discsTotal === undefined\n && tags.genre === undefined\n && tags.date === undefined\n && tags.lyrics === undefined\n && (!tags.images || tags.images.length === 0)\n && tags.comment === undefined\n && (tags.raw === undefined || Object.keys(tags.raw).length === 0);\n};\nexport const DEFAULT_TRACK_DISPOSITION = {\n default: true,\n forced: false,\n original: false,\n commentary: false,\n hearingImpaired: false,\n visuallyImpaired: false,\n};\nexport const validateTrackDisposition = (disposition) => {\n if (!disposition || typeof disposition !== 'object') {\n throw new TypeError('disposition must be an object.');\n }\n if (disposition.default !== undefined && typeof disposition.default !== 'boolean') {\n throw new TypeError('disposition.default must be a boolean.');\n }\n if (disposition.forced !== undefined && typeof disposition.forced !== 'boolean') 
{\n throw new TypeError('disposition.forced must be a boolean.');\n }\n if (disposition.original !== undefined && typeof disposition.original !== 'boolean') {\n throw new TypeError('disposition.original must be a boolean.');\n }\n if (disposition.commentary !== undefined && typeof disposition.commentary !== 'boolean') {\n throw new TypeError('disposition.commentary must be a boolean.');\n }\n if (disposition.hearingImpaired !== undefined && typeof disposition.hearingImpaired !== 'boolean') {\n throw new TypeError('disposition.hearingImpaired must be a boolean.');\n }\n if (disposition.visuallyImpaired !== undefined && typeof disposition.visuallyImpaired !== 'boolean') {\n throw new TypeError('disposition.visuallyImpaired must be a boolean.');\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport function assert(x) {\n if (!x) {\n throw new Error('Assertion failed.');\n }\n}\nexport const normalizeRotation = (rotation) => {\n const mappedRotation = (rotation % 360 + 360) % 360;\n if (mappedRotation === 0 || mappedRotation === 90 || mappedRotation === 180 || mappedRotation === 270) {\n return mappedRotation;\n }\n else {\n throw new Error(`Invalid rotation ${rotation}.`);\n }\n};\nexport const last = (arr) => {\n return arr && arr[arr.length - 1];\n};\nexport const isU32 = (value) => {\n return value >= 0 && value < 2 ** 32;\n};\n/** Reads an exponential-Golomb universal code from a Bitstream. 
*/\nexport const readExpGolomb = (bitstream) => {\n let leadingZeroBits = 0;\n while (bitstream.readBits(1) === 0 && leadingZeroBits < 32) {\n leadingZeroBits++;\n }\n if (leadingZeroBits >= 32) {\n throw new Error('Invalid exponential-Golomb code.');\n }\n const result = (1 << leadingZeroBits) - 1 + bitstream.readBits(leadingZeroBits);\n return result;\n};\n/** Reads a signed exponential-Golomb universal code from a Bitstream. */\nexport const readSignedExpGolomb = (bitstream) => {\n const codeNum = readExpGolomb(bitstream);\n return ((codeNum & 1) === 0)\n ? -(codeNum >> 1)\n : ((codeNum + 1) >> 1);\n};\nexport const writeBits = (bytes, start, end, value) => {\n for (let i = start; i < end; i++) {\n const byteIndex = Math.floor(i / 8);\n let byte = bytes[byteIndex];\n const bitIndex = 0b111 - (i & 0b111);\n byte &= ~(1 << bitIndex);\n byte |= ((value & (1 << (end - i - 1))) >> (end - i - 1)) << bitIndex;\n bytes[byteIndex] = byte;\n }\n};\nexport const toUint8Array = (source) => {\n if (source.constructor === Uint8Array) { // We want a true Uint8Array, not something that extends it like Buffer\n return source;\n }\n else if (ArrayBuffer.isView(source)) {\n return new Uint8Array(source.buffer, source.byteOffset, source.byteLength);\n }\n else {\n return new Uint8Array(source);\n }\n};\nexport const toDataView = (source) => {\n if (source.constructor === DataView) {\n return source;\n }\n else if (ArrayBuffer.isView(source)) {\n return new DataView(source.buffer, source.byteOffset, source.byteLength);\n }\n else {\n return new DataView(source);\n }\n};\nexport const textDecoder = /* #__PURE__ */ new TextDecoder();\nexport const textEncoder = /* #__PURE__ */ new TextEncoder();\nexport const isIso88591Compatible = (text) => {\n for (let i = 0; i < text.length; i++) {\n const code = text.charCodeAt(i);\n if (code > 255) {\n return false;\n }\n }\n return true;\n};\nconst invertObject = (object) => {\n return Object.fromEntries(Object.entries(object).map(([key, value]) 
=> [value, key]));\n};\n// For the color space mappings, see Rec. ITU-T H.273.\nexport const COLOR_PRIMARIES_MAP = {\n bt709: 1, // ITU-R BT.709\n bt470bg: 5, // ITU-R BT.470BG\n smpte170m: 6, // ITU-R BT.601 525 - SMPTE 170M\n bt2020: 9, // ITU-R BT.202\n smpte432: 12, // SMPTE EG 432-1\n};\nexport const COLOR_PRIMARIES_MAP_INVERSE = /* #__PURE__ */ invertObject(COLOR_PRIMARIES_MAP);\nexport const TRANSFER_CHARACTERISTICS_MAP = {\n 'bt709': 1, // ITU-R BT.709\n 'smpte170m': 6, // SMPTE 170M\n 'linear': 8, // Linear transfer characteristics\n 'iec61966-2-1': 13, // IEC 61966-2-1\n 'pq': 16, // Rec. ITU-R BT.2100-2 perceptual quantization (PQ) system\n 'hlg': 18, // Rec. ITU-R BT.2100-2 hybrid loggamma (HLG) system\n};\nexport const TRANSFER_CHARACTERISTICS_MAP_INVERSE = /* #__PURE__ */ invertObject(TRANSFER_CHARACTERISTICS_MAP);\nexport const MATRIX_COEFFICIENTS_MAP = {\n 'rgb': 0, // Identity\n 'bt709': 1, // ITU-R BT.709\n 'bt470bg': 5, // ITU-R BT.470BG\n 'smpte170m': 6, // SMPTE 170M\n 'bt2020-ncl': 9, // ITU-R BT.2020-2 (non-constant luminance)\n};\nexport const MATRIX_COEFFICIENTS_MAP_INVERSE = /* #__PURE__ */ invertObject(MATRIX_COEFFICIENTS_MAP);\nexport const colorSpaceIsComplete = (colorSpace) => {\n return (!!colorSpace\n && !!colorSpace.primaries\n && !!colorSpace.transfer\n && !!colorSpace.matrix\n && colorSpace.fullRange !== undefined);\n};\nexport const isAllowSharedBufferSource = (x) => {\n return (x instanceof ArrayBuffer\n || (typeof SharedArrayBuffer !== 'undefined' && x instanceof SharedArrayBuffer)\n || ArrayBuffer.isView(x));\n};\nexport class AsyncMutex {\n constructor() {\n this.currentPromise = Promise.resolve();\n this.pending = 0;\n }\n async acquire() {\n let resolver;\n const nextPromise = new Promise((resolve) => {\n let resolved = false;\n resolver = () => {\n if (resolved) {\n return;\n }\n resolve();\n this.pending--;\n resolved = true;\n };\n });\n const currentPromiseAlias = this.currentPromise;\n this.currentPromise = 
nextPromise;\n this.pending++;\n await currentPromiseAlias;\n return resolver;\n }\n}\nexport const bytesToHexString = (bytes) => {\n return [...bytes].map(x => x.toString(16).padStart(2, '0')).join('');\n};\nexport const reverseBitsU32 = (x) => {\n x = ((x >> 1) & 0x55555555) | ((x & 0x55555555) << 1);\n x = ((x >> 2) & 0x33333333) | ((x & 0x33333333) << 2);\n x = ((x >> 4) & 0x0f0f0f0f) | ((x & 0x0f0f0f0f) << 4);\n x = ((x >> 8) & 0x00ff00ff) | ((x & 0x00ff00ff) << 8);\n x = ((x >> 16) & 0x0000ffff) | ((x & 0x0000ffff) << 16);\n return x >>> 0; // Ensure it's treated as an unsigned 32-bit integer\n};\n/** Returns the smallest index i such that val[i] === key, or -1 if no such index exists. */\nexport const binarySearchExact = (arr, key, valueGetter) => {\n let low = 0;\n let high = arr.length - 1;\n let ans = -1;\n while (low <= high) {\n const mid = (low + high) >> 1;\n const midVal = valueGetter(arr[mid]);\n if (midVal === key) {\n ans = mid;\n high = mid - 1; // Continue searching left to find the lowest index\n }\n else if (midVal < key) {\n low = mid + 1;\n }\n else {\n high = mid - 1;\n }\n }\n return ans;\n};\n/** Returns the largest index i such that val[i] <= key, or -1 if no such index exists. */\nexport const binarySearchLessOrEqual = (arr, key, valueGetter) => {\n let low = 0;\n let high = arr.length - 1;\n let ans = -1;\n while (low <= high) {\n const mid = (low + (high - low + 1) / 2) | 0;\n const midVal = valueGetter(arr[mid]);\n if (midVal <= key) {\n ans = mid;\n low = mid + 1;\n }\n else {\n high = mid - 1;\n }\n }\n return ans;\n};\n/** Assumes the array is already sorted. 
*/\nexport const insertSorted = (arr, item, valueGetter) => {\n const insertionIndex = binarySearchLessOrEqual(arr, valueGetter(item), valueGetter);\n arr.splice(insertionIndex + 1, 0, item); // This even behaves correctly for the -1 case\n};\nexport const promiseWithResolvers = () => {\n let resolve;\n let reject;\n const promise = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n return { promise, resolve: resolve, reject: reject };\n};\nexport const removeItem = (arr, item) => {\n const index = arr.indexOf(item);\n if (index !== -1) {\n arr.splice(index, 1);\n }\n};\nexport const findLast = (arr, predicate) => {\n for (let i = arr.length - 1; i >= 0; i--) {\n if (predicate(arr[i])) {\n return arr[i];\n }\n }\n return undefined;\n};\nexport const findLastIndex = (arr, predicate) => {\n for (let i = arr.length - 1; i >= 0; i--) {\n if (predicate(arr[i])) {\n return i;\n }\n }\n return -1;\n};\nexport const toAsyncIterator = async function* (source) {\n if (Symbol.iterator in source) {\n // @ts-expect-error Trust me\n yield* source[Symbol.iterator]();\n }\n else {\n // @ts-expect-error Trust me\n yield* source[Symbol.asyncIterator]();\n }\n};\nexport const validateAnyIterable = (iterable) => {\n if (!(Symbol.iterator in iterable) && !(Symbol.asyncIterator in iterable)) {\n throw new TypeError('Argument must be an iterable or async iterable.');\n }\n};\nexport const assertNever = (x) => {\n // eslint-disable-next-line @typescript-eslint/restrict-template-expressions\n throw new Error(`Unexpected value: ${x}`);\n};\nexport const getUint24 = (view, byteOffset, littleEndian) => {\n const byte1 = view.getUint8(byteOffset);\n const byte2 = view.getUint8(byteOffset + 1);\n const byte3 = view.getUint8(byteOffset + 2);\n if (littleEndian) {\n return byte1 | (byte2 << 8) | (byte3 << 16);\n }\n else {\n return (byte1 << 16) | (byte2 << 8) | byte3;\n }\n};\nexport const getInt24 = (view, byteOffset, littleEndian) => {\n // The left shift pushes the most 
significant bit into the sign bit region, and the subsequent right shift\n // then correctly interprets the sign bit.\n return getUint24(view, byteOffset, littleEndian) << 8 >> 8;\n};\nexport const setUint24 = (view, byteOffset, value, littleEndian) => {\n // Ensure the value is within 24-bit unsigned range (0 to 16777215)\n value = value >>> 0; // Convert to unsigned 32-bit\n value = value & 0xFFFFFF; // Mask to 24 bits\n if (littleEndian) {\n view.setUint8(byteOffset, value & 0xFF);\n view.setUint8(byteOffset + 1, (value >>> 8) & 0xFF);\n view.setUint8(byteOffset + 2, (value >>> 16) & 0xFF);\n }\n else {\n view.setUint8(byteOffset, (value >>> 16) & 0xFF);\n view.setUint8(byteOffset + 1, (value >>> 8) & 0xFF);\n view.setUint8(byteOffset + 2, value & 0xFF);\n }\n};\nexport const setInt24 = (view, byteOffset, value, littleEndian) => {\n // Ensure the value is within 24-bit signed range (-8388608 to 8388607)\n value = clamp(value, -8388608, 8388607);\n // Convert negative values to their 24-bit representation\n if (value < 0) {\n value = (value + 0x1000000) & 0xFFFFFF;\n }\n setUint24(view, byteOffset, value, littleEndian);\n};\nexport const setInt64 = (view, byteOffset, value, littleEndian) => {\n if (littleEndian) {\n view.setUint32(byteOffset + 0, value, true);\n view.setInt32(byteOffset + 4, Math.floor(value / 2 ** 32), true);\n }\n else {\n view.setInt32(byteOffset + 0, Math.floor(value / 2 ** 32), true);\n view.setUint32(byteOffset + 4, value, true);\n }\n};\n/**\n * Calls a function on each value spat out by an async generator. 
The reason for writing this manually instead of\n * using a generator function is that the generator function queues return() calls - here, we forward them immediately.\n */\nexport const mapAsyncGenerator = (generator, map) => {\n return {\n async next() {\n const result = await generator.next();\n if (result.done) {\n return { value: undefined, done: true };\n }\n else {\n return { value: map(result.value), done: false };\n }\n },\n return() {\n return generator.return();\n },\n throw(error) {\n return generator.throw(error);\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n};\nexport const clamp = (value, min, max) => {\n return Math.max(min, Math.min(max, value));\n};\nexport const UNDETERMINED_LANGUAGE = 'und';\nexport const roundIfAlmostInteger = (value) => {\n const rounded = Math.round(value);\n if (Math.abs(value / rounded - 1) < 10 * Number.EPSILON) {\n return rounded;\n }\n else {\n return value;\n }\n};\nexport const roundToMultiple = (value, multiple) => {\n return Math.round(value / multiple) * multiple;\n};\nexport const floorToMultiple = (value, multiple) => {\n return Math.floor(value / multiple) * multiple;\n};\nexport const ilog = (x) => {\n let ret = 0;\n while (x) {\n ret++;\n x >>= 1;\n }\n return ret;\n};\nconst ISO_639_2_REGEX = /^[a-z]{3}$/;\nexport const isIso639Dash2LanguageCode = (x) => {\n return ISO_639_2_REGEX.test(x);\n};\n// Since the result will be truncated, add a bit of eps to compensate for floating point errors\nexport const SECOND_TO_MICROSECOND_FACTOR = 1e6 * (1 + Number.EPSILON);\n/**\n * Merges two RequestInit objects with special handling for headers.\n * Headers are merged case-insensitively, but original casing is preserved.\n * init2 headers take precedence and will override case-insensitive matches from init1.\n */\nexport const mergeRequestInit = (init1, init2) => {\n const merged = { ...init1, ...init2 };\n // Special handling for headers\n if (init1.headers || init2.headers) {\n const headers1 = 
init1.headers ? normalizeHeaders(init1.headers) : {};\n const headers2 = init2.headers ? normalizeHeaders(init2.headers) : {};\n const mergedHeaders = { ...headers1 };\n // For each header in headers2, check if a case-insensitive match exists in mergedHeaders\n Object.entries(headers2).forEach(([key2, value2]) => {\n const existingKey = Object.keys(mergedHeaders).find(key1 => key1.toLowerCase() === key2.toLowerCase());\n if (existingKey) {\n delete mergedHeaders[existingKey];\n }\n mergedHeaders[key2] = value2;\n });\n merged.headers = mergedHeaders;\n }\n return merged;\n};\n/** Normalizes HeadersInit to a Record<string, string> format. */\nconst normalizeHeaders = (headers) => {\n if (headers instanceof Headers) {\n const result = {};\n headers.forEach((value, key) => {\n result[key] = value;\n });\n return result;\n }\n if (Array.isArray(headers)) {\n const result = {};\n headers.forEach(([key, value]) => {\n result[key] = value;\n });\n return result;\n }\n return headers;\n};\nexport const retriedFetch = async (fetchFn, url, requestInit, getRetryDelay, shouldStop) => {\n let attempts = 0;\n while (true) {\n try {\n return await fetchFn(url, requestInit);\n }\n catch (error) {\n if (shouldStop()) {\n throw error;\n }\n attempts++;\n const retryDelayInSeconds = getRetryDelay(attempts, error, url);\n if (retryDelayInSeconds === null) {\n throw error;\n }\n console.error('Retrying failed fetch. Error:', error);\n if (!Number.isFinite(retryDelayInSeconds) || retryDelayInSeconds < 0) {\n throw new TypeError('Retry delay must be a non-negative finite number.');\n }\n if (retryDelayInSeconds > 0) {\n await new Promise(resolve => setTimeout(resolve, 1000 * retryDelayInSeconds));\n }\n if (shouldStop()) {\n throw error;\n }\n }\n }\n};\nexport const computeRationalApproximation = (x, maxDenominator) => {\n // Handle negative numbers\n const sign = x < 0 ? 
-1 : 1;\n x = Math.abs(x);\n let prevNumerator = 0, prevDenominator = 1;\n let currNumerator = 1, currDenominator = 0;\n // Continued fraction algorithm\n let remainder = x;\n while (true) {\n const integer = Math.floor(remainder);\n // Calculate next convergent\n const nextNumerator = integer * currNumerator + prevNumerator;\n const nextDenominator = integer * currDenominator + prevDenominator;\n if (nextDenominator > maxDenominator) {\n return {\n numerator: sign * currNumerator,\n denominator: currDenominator,\n };\n }\n prevNumerator = currNumerator;\n prevDenominator = currDenominator;\n currNumerator = nextNumerator;\n currDenominator = nextDenominator;\n remainder = 1 / (remainder - integer);\n // Guard against precision issues\n if (!isFinite(remainder)) {\n break;\n }\n }\n return {\n numerator: sign * currNumerator,\n denominator: currDenominator,\n };\n};\nexport class CallSerializer {\n constructor() {\n this.currentPromise = Promise.resolve();\n }\n call(fn) {\n return this.currentPromise = this.currentPromise.then(fn);\n }\n}\nlet isWebKitCache = null;\nexport const isWebKit = () => {\n if (isWebKitCache !== null) {\n return isWebKitCache;\n }\n // This even returns true for WebKit-wrapping browsers such as Chrome on iOS\n return isWebKitCache = !!(typeof navigator !== 'undefined'\n && (navigator.vendor?.match(/apple/i)\n // Or, in workers:\n || (/AppleWebKit/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent))\n || /\\b(iPad|iPhone|iPod)\\b/.test(navigator.userAgent)));\n};\nlet isFirefoxCache = null;\nexport const isFirefox = () => {\n if (isFirefoxCache !== null) {\n return isFirefoxCache;\n }\n return isFirefoxCache = typeof navigator !== 'undefined' && navigator.userAgent?.includes('Firefox');\n};\nlet isChromiumCache = null;\nexport const isChromium = () => {\n if (isChromiumCache !== null) {\n return isChromiumCache;\n }\n return isChromiumCache = !!(typeof navigator !== 'undefined'\n && (navigator.vendor?.includes('Google Inc') 
|| /Chrome/.test(navigator.userAgent)));\n};\nlet chromiumVersionCache = null;\nexport const getChromiumVersion = () => {\n if (chromiumVersionCache !== null) {\n return chromiumVersionCache;\n }\n if (typeof navigator === 'undefined') {\n return null;\n }\n const match = /\\bChrome\\/(\\d+)/.exec(navigator.userAgent);\n if (!match) {\n return null;\n }\n return chromiumVersionCache = Number(match[1]);\n};\n/** Acts like `??` except the condition is -1 and not null/undefined. */\nexport const coalesceIndex = (a, b) => {\n return a !== -1 ? a : b;\n};\nexport const closedIntervalsOverlap = (startA, endA, startB, endB) => {\n return startA <= endB && startB <= endA;\n};\nexport const keyValueIterator = function* (object) {\n for (const key in object) {\n const value = object[key];\n if (value === undefined) {\n continue;\n }\n yield { key, value };\n }\n};\nexport const imageMimeTypeToExtension = (mimeType) => {\n switch (mimeType.toLowerCase()) {\n case 'image/jpeg':\n case 'image/jpg':\n return '.jpg';\n case 'image/png':\n return '.png';\n case 'image/gif':\n return '.gif';\n case 'image/webp':\n return '.webp';\n case 'image/bmp':\n return '.bmp';\n case 'image/svg+xml':\n return '.svg';\n case 'image/tiff':\n return '.tiff';\n case 'image/avif':\n return '.avif';\n case 'image/x-icon':\n case 'image/vnd.microsoft.icon':\n return '.ico';\n default:\n return null;\n }\n};\nexport const base64ToBytes = (base64) => {\n const decoded = atob(base64);\n const bytes = new Uint8Array(decoded.length);\n for (let i = 0; i < decoded.length; i++) {\n bytes[i] = decoded.charCodeAt(i);\n }\n return bytes;\n};\nexport const bytesToBase64 = (bytes) => {\n let string = '';\n for (let i = 0; i < bytes.length; i++) {\n string += String.fromCharCode(bytes[i]);\n }\n return btoa(string);\n};\nexport const uint8ArraysAreEqual = (a, b) => {\n if (a.length !== b.length) {\n return false;\n }\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) {\n return false;\n }\n }\n return 
true;\n};\nexport const polyfillSymbolDispose = () => {\n // https://www.typescriptlang.org/docs/handbook/release-notes/typescript-5-2.html\n // @ts-expect-error Readonly\n Symbol.dispose ??= Symbol('Symbol.dispose');\n};\nexport const isNumber = (x) => {\n return typeof x === 'number' && !Number.isNaN(x);\n};\nexport const simplifyRational = (rational) => {\n assert(rational.den !== 0);\n let a = Math.abs(rational.num);\n let b = Math.abs(rational.den);\n // Euclidean algorithm\n while (b !== 0) {\n const t = a % b;\n a = b;\n b = t;\n }\n const gcd = a || 1;\n return {\n num: rational.num / gcd,\n den: rational.den / gcd,\n };\n};\nexport const validateRectangle = (rect, propertyPath) => {\n if (typeof rect !== 'object' || !rect) {\n throw new TypeError(`${propertyPath} must be an object.`);\n }\n if (!Number.isInteger(rect.left) || rect.left < 0) {\n throw new TypeError(`${propertyPath}.left must be a non-negative integer.`);\n }\n if (!Number.isInteger(rect.top) || rect.top < 0) {\n throw new TypeError(`${propertyPath}.top must be a non-negative integer.`);\n }\n if (!Number.isInteger(rect.width) || rect.width < 0) {\n throw new TypeError(`${propertyPath}.width must be a non-negative integer.`);\n }\n if (!Number.isInteger(rect.height) || rect.height < 0) {\n throw new TypeError(`${propertyPath}.height must be a non-negative integer.`);\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nexport const TIMESCALE = 90_000; // MPEG-TS timestamps run on a 90 kHz clock\nexport const TS_PACKET_SIZE = 188;\nexport const buildMpegTsMimeType = (codecStrings) => {\n let string = 'video/MP2T';\n const uniqueCodecStrings = [...new Set(codecStrings.filter(Boolean))];\n if (uniqueCodecStrings.length > 0) {\n string += `; codecs=\"${uniqueCodecStrings.join(', ')}\"`;\n }\n return string;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { parseOpusTocByte } from '../codec-data.js';\nimport { assert, ilog, toDataView } from '../misc.js';\nexport const OGGS = 0x5367674f; // 'OggS'\nconst OGG_CRC_POLYNOMIAL = 0x04c11db7;\nconst OGG_CRC_TABLE = new Uint32Array(256);\nfor (let n = 0; n < 256; n++) {\n let crc = n << 24;\n for (let k = 0; k < 8; k++) {\n crc = (crc & 0x80000000)\n ? 
((crc << 1) ^ OGG_CRC_POLYNOMIAL)\n : (crc << 1);\n }\n OGG_CRC_TABLE[n] = (crc >>> 0) & 0xffffffff;\n}\nexport const computeOggPageCrc = (bytes) => {\n const view = toDataView(bytes);\n const originalChecksum = view.getUint32(22, true);\n view.setUint32(22, 0, true); // Zero out checksum field\n let crc = 0;\n for (let i = 0; i < bytes.length; i++) {\n const byte = bytes[i];\n crc = ((crc << 8) ^ OGG_CRC_TABLE[(crc >>> 24) ^ byte]) >>> 0;\n }\n view.setUint32(22, originalChecksum, true); // Restore checksum field\n return crc;\n};\nexport const extractSampleMetadata = (data, codecInfo, vorbisLastBlocksize) => {\n let durationInSamples = 0;\n let currentBlocksize = null;\n if (data.length > 0) {\n // To know sample duration, we'll need to peak inside the packet\n if (codecInfo.codec === 'vorbis') {\n assert(codecInfo.vorbisInfo);\n const vorbisModeCount = codecInfo.vorbisInfo.modeBlockflags.length;\n const bitCount = ilog(vorbisModeCount - 1);\n const modeMask = ((1 << bitCount) - 1) << 1;\n const modeNumber = (data[0] & modeMask) >> 1;\n if (modeNumber >= codecInfo.vorbisInfo.modeBlockflags.length) {\n throw new Error('Invalid mode number.');\n }\n // In Vorbis, packet duration also depends on the blocksize of the previous packet\n let prevBlocksize = vorbisLastBlocksize;\n const blockflag = codecInfo.vorbisInfo.modeBlockflags[modeNumber];\n currentBlocksize = codecInfo.vorbisInfo.blocksizes[blockflag];\n if (blockflag === 1) {\n const prevMask = (modeMask | 0x1) + 1;\n const flag = data[0] & prevMask ? 1 : 0;\n prevBlocksize = codecInfo.vorbisInfo.blocksizes[flag];\n }\n durationInSamples = prevBlocksize !== null\n ? 
(prevBlocksize + currentBlocksize) >> 2\n : 0; // The first sample outputs no audio data and therefore has a duration of 0\n }\n else if (codecInfo.codec === 'opus') {\n const toc = parseOpusTocByte(data);\n durationInSamples = toc.durationInSamples;\n }\n }\n return {\n durationInSamples,\n vorbisBlockSize: currentBlocksize,\n };\n};\nexport const buildOggMimeType = (info) => {\n let string = 'audio/ogg';\n if (info.codecStrings) {\n const uniqueCodecMimeTypes = [...new Set(info.codecStrings)];\n string += `; codecs=\"${uniqueCodecMimeTypes.join(', ')}\"`;\n }\n return string;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { readI64Le, readU32Le, readU8 } from '../reader.js';\nimport { OGGS } from './ogg-misc.js';\nexport const MIN_PAGE_HEADER_SIZE = 27;\nexport const MAX_PAGE_HEADER_SIZE = 27 + 255;\nexport const MAX_PAGE_SIZE = MAX_PAGE_HEADER_SIZE + 255 * 255;\nexport const readPageHeader = (slice) => {\n const startPos = slice.filePos;\n const capturePattern = readU32Le(slice);\n if (capturePattern !== OGGS) {\n return null;\n }\n slice.skip(1); // Version\n const headerType = readU8(slice);\n const granulePosition = readI64Le(slice);\n const serialNumber = readU32Le(slice);\n const sequenceNumber = readU32Le(slice);\n const checksum = readU32Le(slice);\n const numberPageSegments = readU8(slice);\n const lacingValues = new Uint8Array(numberPageSegments);\n for (let i = 0; i < numberPageSegments; i++) {\n lacingValues[i] = readU8(slice);\n }\n const headerSize = 27 + numberPageSegments;\n const dataSize = lacingValues.reduce((a, b) => a + b, 0);\n const totalSize = headerSize + dataSize;\n return {\n headerStartPos: startPos,\n totalSize,\n dataStartPos: startPos + headerSize,\n dataSize,\n headerType,\n 
granulePosition,\n serialNumber,\n sequenceNumber,\n checksum,\n lacingValues,\n };\n};\nexport const findNextPageHeader = (slice, until) => {\n while (slice.filePos < until - (4 - 1)) { // Size of word minus 1\n const word = readU32Le(slice);\n const firstByte = word & 0xff;\n const secondByte = (word >>> 8) & 0xff;\n const thirdByte = (word >>> 16) & 0xff;\n const fourthByte = (word >>> 24) & 0xff;\n const O = 0x4f; // 'O'\n if (firstByte !== O && secondByte !== O && thirdByte !== O && fourthByte !== O) {\n continue;\n }\n slice.skip(-4);\n if (word === OGGS) {\n // We have found the capture pattern\n return true;\n }\n slice.skip(1);\n }\n return false;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { SECOND_TO_MICROSECOND_FACTOR } from './misc.js';\nexport const PLACEHOLDER_DATA = /* #__PURE__ */ new Uint8Array(0);\n/**\n * Represents an encoded chunk of media. Mainly used as an expressive wrapper around WebCodecs API's\n * [`EncodedVideoChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedVideoChunk) and\n * [`EncodedAudioChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedAudioChunk), but can also be used\n * standalone.\n * @group Packets\n * @public\n */\nexport class EncodedPacket {\n /** Creates a new {@link EncodedPacket} from raw bytes and timing information. */\n constructor(\n /**\n * The encoded data of this packet. For any given codec, this data must adhere to the format specified in the\n * Mediabunny Codec Registry.\n */\n data, \n /** The type of this packet. */\n type, \n /**\n * The presentation timestamp of this packet in seconds. May be negative. Samples with negative end timestamps\n * should not be presented.\n */\n timestamp, \n /** The duration of this packet in seconds. 
*/\n duration, \n /**\n * The sequence number indicates the decode order of the packets. Packet A must be decoded before packet B if A\n * has a lower sequence number than B. If two packets have the same sequence number, they are the same packet.\n * Otherwise, sequence numbers are arbitrary and are not guaranteed to have any meaning besides their relative\n * ordering. Negative sequence numbers mean the sequence number is undefined.\n */\n sequenceNumber = -1, byteLength, sideData) {\n this.data = data;\n this.type = type;\n this.timestamp = timestamp;\n this.duration = duration;\n this.sequenceNumber = sequenceNumber;\n if (data === PLACEHOLDER_DATA && byteLength === undefined) {\n throw new Error('Internal error: byteLength must be explicitly provided when constructing metadata-only packets.');\n }\n if (byteLength === undefined) {\n byteLength = data.byteLength;\n }\n if (!(data instanceof Uint8Array)) {\n throw new TypeError('data must be a Uint8Array.');\n }\n if (type !== 'key' && type !== 'delta') {\n throw new TypeError('type must be either \"key\" or \"delta\".');\n }\n if (!Number.isFinite(timestamp)) {\n throw new TypeError('timestamp must be a number.');\n }\n if (!Number.isFinite(duration) || duration < 0) {\n throw new TypeError('duration must be a non-negative number.');\n }\n if (!Number.isFinite(sequenceNumber)) {\n throw new TypeError('sequenceNumber must be a number.');\n }\n if (!Number.isInteger(byteLength) || byteLength < 0) {\n throw new TypeError('byteLength must be a non-negative integer.');\n }\n if (sideData !== undefined && (typeof sideData !== 'object' || !sideData)) {\n throw new TypeError('sideData, when provided, must be an object.');\n }\n if (sideData?.alpha !== undefined && !(sideData.alpha instanceof Uint8Array)) {\n throw new TypeError('sideData.alpha, when provided, must be a Uint8Array.');\n }\n if (sideData?.alphaByteLength !== undefined\n && (!Number.isInteger(sideData.alphaByteLength) || sideData.alphaByteLength < 0)) {\n 
throw new TypeError('sideData.alphaByteLength, when provided, must be a non-negative integer.');\n }\n this.byteLength = byteLength;\n this.sideData = sideData ?? {};\n if (this.sideData.alpha && this.sideData.alphaByteLength === undefined) {\n this.sideData.alphaByteLength = this.sideData.alpha.byteLength;\n }\n }\n /**\n * If this packet is a metadata-only packet. Metadata-only packets don't contain their packet data. They are the\n * result of retrieving packets with {@link PacketRetrievalOptions.metadataOnly} set to `true`.\n */\n get isMetadataOnly() {\n return this.data === PLACEHOLDER_DATA;\n }\n /** The timestamp of this packet in microseconds. */\n get microsecondTimestamp() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);\n }\n /** The duration of this packet in microseconds. */\n get microsecondDuration() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);\n }\n /** Converts this packet to an\n * [`EncodedVideoChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedVideoChunk) for use with the\n * WebCodecs API. */\n toEncodedVideoChunk() {\n if (this.isMetadataOnly) {\n throw new TypeError('Metadata-only packets cannot be converted to a video chunk.');\n }\n if (typeof EncodedVideoChunk === 'undefined') {\n throw new Error('Your browser does not support EncodedVideoChunk.');\n }\n return new EncodedVideoChunk({\n data: this.data,\n type: this.type,\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration,\n });\n }\n /**\n * Converts this packet to an\n * [`EncodedVideoChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedVideoChunk) for use with the\n * WebCodecs API, using the alpha side data instead of the color data. 
Throws if no alpha side data is defined.\n */\n alphaToEncodedVideoChunk(type = this.type) {\n if (!this.sideData.alpha) {\n throw new TypeError('This packet does not contain alpha side data.');\n }\n if (this.isMetadataOnly) {\n throw new TypeError('Metadata-only packets cannot be converted to a video chunk.');\n }\n if (typeof EncodedVideoChunk === 'undefined') {\n throw new Error('Your browser does not support EncodedVideoChunk.');\n }\n return new EncodedVideoChunk({\n data: this.sideData.alpha,\n type,\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration,\n });\n }\n /** Converts this packet to an\n * [`EncodedAudioChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedAudioChunk) for use with the\n * WebCodecs API. */\n toEncodedAudioChunk() {\n if (this.isMetadataOnly) {\n throw new TypeError('Metadata-only packets cannot be converted to an audio chunk.');\n }\n if (typeof EncodedAudioChunk === 'undefined') {\n throw new Error('Your browser does not support EncodedAudioChunk.');\n }\n return new EncodedAudioChunk({\n data: this.data,\n type: this.type,\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration,\n });\n }\n /**\n * Creates an {@link EncodedPacket} from an\n * [`EncodedVideoChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedVideoChunk) or\n * [`EncodedAudioChunk`](https://developer.mozilla.org/en-US/docs/Web/API/EncodedAudioChunk). This method is useful\n * for converting chunks from the WebCodecs API to `EncodedPacket` instances.\n */\n static fromEncodedChunk(chunk, sideData) {\n if (!(chunk instanceof EncodedVideoChunk || chunk instanceof EncodedAudioChunk)) {\n throw new TypeError('chunk must be an EncodedVideoChunk or EncodedAudioChunk.');\n }\n const data = new Uint8Array(chunk.byteLength);\n chunk.copyTo(data);\n return new EncodedPacket(data, chunk.type, chunk.timestamp / 1e6, (chunk.duration ?? 
0) / 1e6, undefined, undefined, sideData);\n }\n /** Clones this packet while optionally modifying the new packet's data. */\n clone(options) {\n if (options !== undefined && (typeof options !== 'object' || options === null)) {\n throw new TypeError('options, when provided, must be an object.');\n }\n if (options?.data !== undefined && !(options.data instanceof Uint8Array)) {\n throw new TypeError('options.data, when provided, must be a Uint8Array.');\n }\n if (options?.type !== undefined && options.type !== 'key' && options.type !== 'delta') {\n throw new TypeError('options.type, when provided, must be either \"key\" or \"delta\".');\n }\n if (options?.timestamp !== undefined && !Number.isFinite(options.timestamp)) {\n throw new TypeError('options.timestamp, when provided, must be a number.');\n }\n if (options?.duration !== undefined && !Number.isFinite(options.duration)) {\n throw new TypeError('options.duration, when provided, must be a number.');\n }\n if (options?.sequenceNumber !== undefined && !Number.isFinite(options.sequenceNumber)) {\n throw new TypeError('options.sequenceNumber, when provided, must be a number.');\n }\n if (options?.sideData !== undefined && (typeof options.sideData !== 'object' || options.sideData === null)) {\n throw new TypeError('options.sideData, when provided, must be an object.');\n }\n return new EncodedPacket(options?.data ?? this.data, options?.type ?? this.type, options?.timestamp ?? this.timestamp, options?.duration ?? this.duration, options?.sequenceNumber ?? this.sequenceNumber, this.byteLength, options?.sideData ?? this.sideData);\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\n// https://github.com/dystopiancode/pcm-g711/blob/master/pcm-g711/g711.c\nexport const toUlaw = (s16) => {\n const MULAW_MAX = 0x1FFF;\n const MULAW_BIAS = 33;\n let number = s16;\n let mask = 0x1000;\n let sign = 0;\n let position = 12;\n let lsb = 0;\n if (number < 0) {\n number = -number;\n sign = 0x80;\n }\n number += MULAW_BIAS;\n if (number > MULAW_MAX) {\n number = MULAW_MAX;\n }\n while ((number & mask) !== mask && position >= 5) {\n mask >>= 1;\n position--;\n }\n lsb = (number >> (position - 4)) & 0x0f;\n return ~(sign | ((position - 5) << 4) | lsb) & 0xFF;\n};\nexport const fromUlaw = (u8) => {\n const MULAW_BIAS = 33;\n let sign = 0;\n let position = 0;\n let number = ~u8;\n if (number & 0x80) {\n number &= ~(1 << 7);\n sign = -1;\n }\n position = ((number & 0xF0) >> 4) + 5;\n const decoded = ((1 << position) | ((number & 0x0F) << (position - 4))\n | (1 << (position - 5))) - MULAW_BIAS;\n return (sign === 0) ? decoded : -decoded;\n};\nexport const toAlaw = (s16) => {\n const ALAW_MAX = 0xFFF;\n let mask = 0x800;\n let sign = 0;\n let position = 11;\n let lsb = 0;\n let number = s16;\n if (number < 0) {\n number = -number;\n sign = 0x80;\n }\n if (number > ALAW_MAX) {\n number = ALAW_MAX;\n }\n while ((number & mask) !== mask && position >= 5) {\n mask >>= 1;\n position--;\n }\n lsb = (number >> ((position === 4) ? 1 : (position - 4))) & 0x0f;\n return (sign | ((position - 4) << 4) | lsb) ^ 0x55;\n};\nexport const fromAlaw = (u8) => {\n let sign = 0x00;\n let position = 0;\n let number = u8 ^ 0x55;\n if (number & 0x80) {\n number &= ~(1 << 7);\n sign = -1;\n }\n position = ((number & 0xF0) >> 4) + 4;\n let decoded = 0;\n if (position !== 4) {\n decoded = ((1 << position) | ((number & 0x0F) << (position - 4))\n | (1 << (position - 5)));\n }\n else {\n decoded = (number << 1) | 1;\n }\n return (sign === 0) ? 
decoded : -decoded;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { InputDisposedError } from './input.js';\nimport { assert, clamp, getUint24, toDataView } from './misc.js';\nexport class Reader {\n constructor(source) {\n this.source = source;\n }\n requestSlice(start, length) {\n if (this.source._disposed) {\n throw new InputDisposedError();\n }\n if (start < 0) {\n return null;\n }\n if (this.fileSize !== null && start + length > this.fileSize) {\n return null;\n }\n const end = start + length;\n const result = this.source._read(start, end);\n if (result instanceof Promise) {\n return result.then((x) => {\n if (!x) {\n return null;\n }\n return new FileSlice(x.bytes, x.view, x.offset, start, end);\n });\n }\n else {\n if (!result) {\n return null;\n }\n return new FileSlice(result.bytes, result.view, result.offset, start, end);\n }\n }\n requestSliceRange(start, minLength, maxLength) {\n if (this.source._disposed) {\n throw new InputDisposedError();\n }\n if (start < 0) {\n return null;\n }\n if (this.fileSize !== null) {\n return this.requestSlice(start, clamp(this.fileSize - start, minLength, maxLength));\n }\n else {\n const promisedAttempt = this.requestSlice(start, maxLength);\n const handleAttempt = (attempt) => {\n if (attempt) {\n return attempt;\n }\n const handleFileSize = (fileSize) => {\n assert(fileSize !== null); // The slice couldn't fit, meaning we must know the file size now\n return this.requestSlice(start, clamp(fileSize - start, minLength, maxLength));\n };\n const promisedFileSize = this.source._retrieveSize();\n if (promisedFileSize instanceof Promise) {\n return promisedFileSize.then(handleFileSize);\n }\n else {\n return handleFileSize(promisedFileSize);\n }\n };\n if (promisedAttempt instanceof 
Promise) {\n return promisedAttempt.then(handleAttempt);\n }\n else {\n return handleAttempt(promisedAttempt);\n }\n }\n }\n}\nexport class FileSlice {\n constructor(\n /** The underlying bytes backing this slice. Avoid using this directly and prefer reader functions instead. */\n bytes, \n /** A view into the bytes backing this slice. Avoid using this directly and prefer reader functions instead. */\n view, \n /** The offset in \"file bytes\" at which `bytes` begins in the file. */\n offset, \n /** The offset in \"file bytes\" where this slice begins. */\n start, \n /** The offset in \"file bytes\" where this slice ends (exclusive). */\n end) {\n this.bytes = bytes;\n this.view = view;\n this.offset = offset;\n this.start = start;\n this.end = end;\n this.bufferPos = start - offset;\n }\n static tempFromBytes(bytes) {\n return new FileSlice(bytes, toDataView(bytes), 0, 0, bytes.length);\n }\n get length() {\n return this.end - this.start;\n }\n get filePos() {\n return this.offset + this.bufferPos;\n }\n set filePos(value) {\n this.bufferPos = value - this.offset;\n }\n /** The number of bytes left from the current pos to the end of the slice. */\n get remainingLength() {\n return Math.max(this.end - this.filePos, 0);\n }\n skip(byteCount) {\n this.bufferPos += byteCount;\n }\n /** Creates a new subslice of this slice whose byte range must be contained within this slice. */\n slice(filePos, length = this.end - filePos) {\n if (filePos < this.start || filePos + length > this.end) {\n throw new RangeError('Slicing outside of original slice.');\n }\n return new FileSlice(this.bytes, this.view, this.offset, filePos, filePos + length);\n }\n}\nconst checkIsInRange = (slice, bytesToRead) => {\n if (slice.filePos < slice.start || slice.filePos + bytesToRead > slice.end) {\n throw new RangeError(`Tried reading [${slice.filePos}, ${slice.filePos + bytesToRead}), but slice is`\n + ` [${slice.start}, ${slice.end}). 
This is likely an internal error, please report it alongside the file`\n + ` that caused it.`);\n }\n};\nexport const readBytes = (slice, length) => {\n checkIsInRange(slice, length);\n const bytes = slice.bytes.subarray(slice.bufferPos, slice.bufferPos + length);\n slice.bufferPos += length;\n return bytes;\n};\nexport const readU8 = (slice) => {\n checkIsInRange(slice, 1);\n return slice.view.getUint8(slice.bufferPos++);\n};\nexport const readU16 = (slice, littleEndian) => {\n checkIsInRange(slice, 2);\n const value = slice.view.getUint16(slice.bufferPos, littleEndian);\n slice.bufferPos += 2;\n return value;\n};\nexport const readU16Be = (slice) => {\n checkIsInRange(slice, 2);\n const value = slice.view.getUint16(slice.bufferPos, false);\n slice.bufferPos += 2;\n return value;\n};\nexport const readU24Be = (slice) => {\n checkIsInRange(slice, 3);\n const value = getUint24(slice.view, slice.bufferPos, false);\n slice.bufferPos += 3;\n return value;\n};\nexport const readI16Be = (slice) => {\n checkIsInRange(slice, 2);\n const value = slice.view.getInt16(slice.bufferPos, false);\n slice.bufferPos += 2;\n return value;\n};\nexport const readU32 = (slice, littleEndian) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getUint32(slice.bufferPos, littleEndian);\n slice.bufferPos += 4;\n return value;\n};\nexport const readU32Be = (slice) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getUint32(slice.bufferPos, false);\n slice.bufferPos += 4;\n return value;\n};\nexport const readU32Le = (slice) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getUint32(slice.bufferPos, true);\n slice.bufferPos += 4;\n return value;\n};\nexport const readI32Be = (slice) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getInt32(slice.bufferPos, false);\n slice.bufferPos += 4;\n return value;\n};\nexport const readI32Le = (slice) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getInt32(slice.bufferPos, true);\n slice.bufferPos += 
4;\n return value;\n};\nexport const readU64 = (slice, littleEndian) => {\n let low;\n let high;\n if (littleEndian) {\n low = readU32(slice, true);\n high = readU32(slice, true);\n }\n else {\n high = readU32(slice, false);\n low = readU32(slice, false);\n }\n return high * 0x100000000 + low;\n};\nexport const readU64Be = (slice) => {\n const high = readU32Be(slice);\n const low = readU32Be(slice);\n return high * 0x100000000 + low;\n};\nexport const readI64Be = (slice) => {\n const high = readI32Be(slice);\n const low = readU32Be(slice);\n return high * 0x100000000 + low;\n};\nexport const readI64Le = (slice) => {\n const low = readU32Le(slice);\n const high = readI32Le(slice);\n return high * 0x100000000 + low;\n};\nexport const readF32Be = (slice) => {\n checkIsInRange(slice, 4);\n const value = slice.view.getFloat32(slice.bufferPos, false);\n slice.bufferPos += 4;\n return value;\n};\nexport const readF64Be = (slice) => {\n checkIsInRange(slice, 8);\n const value = slice.view.getFloat64(slice.bufferPos, false);\n slice.bufferPos += 8;\n return value;\n};\nexport const readAscii = (slice, length) => {\n checkIsInRange(slice, length);\n let str = '';\n for (let i = 0; i < length; i++) {\n str += String.fromCharCode(slice.bytes[slice.bufferPos++]);\n }\n return str;\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { assert, clamp, COLOR_PRIMARIES_MAP, isAllowSharedBufferSource, MATRIX_COEFFICIENTS_MAP, SECOND_TO_MICROSECOND_FACTOR, toDataView, toUint8Array, TRANSFER_CHARACTERISTICS_MAP, isFirefox, polyfillSymbolDispose, assertNever, isWebKit, simplifyRational, validateRectangle, } from './misc.js';\npolyfillSymbolDispose();\n// Let's manually handle logging the garbage collection errors that are typically logged by the browser. This way, they\n// also kick for audio samples (which is normally not the case), making sure any incorrect code is quickly caught.\nlet lastVideoGcErrorLog = -Infinity;\nlet lastAudioGcErrorLog = -Infinity;\nlet finalizationRegistry = null;\nif (typeof FinalizationRegistry !== 'undefined') {\n finalizationRegistry = new FinalizationRegistry((value) => {\n const now = Date.now();\n if (value.type === 'video') {\n if (now - lastVideoGcErrorLog >= 1000) {\n // This error is annoying but oh so important\n console.error(`A VideoSample was garbage collected without first being closed. For proper resource management,`\n + ` make sure to call close() on all your VideoSamples as soon as you're done using them.`);\n lastVideoGcErrorLog = now;\n }\n if (typeof VideoFrame !== 'undefined' && value.data instanceof VideoFrame) {\n value.data.close(); // Prevent the browser error since we're logging our own\n }\n }\n else {\n if (now - lastAudioGcErrorLog >= 1000) {\n console.error(`An AudioSample was garbage collected without first being closed. 
For proper resource management,`\n + ` make sure to call close() on all your AudioSamples as soon as you're done using them.`);\n lastAudioGcErrorLog = now;\n }\n if (typeof AudioData !== 'undefined' && value.data instanceof AudioData) {\n value.data.close();\n }\n }\n });\n}\n/**\n * The list of {@link VideoSample} pixel formats.\n * @group Samples\n * @public\n */\nexport const VIDEO_SAMPLE_PIXEL_FORMATS = [\n // 4:2:0 Y, U, V\n 'I420',\n 'I420P10',\n 'I420P12',\n // 4:2:0 Y, U, V, A\n 'I420A',\n 'I420AP10',\n 'I420AP12',\n // 4:2:2 Y, U, V\n 'I422',\n 'I422P10',\n 'I422P12',\n // 4:2:2 Y, U, V, A\n 'I422A',\n 'I422AP10',\n 'I422AP12',\n // 4:4:4 Y, U, V\n 'I444',\n 'I444P10',\n 'I444P12',\n // 4:4:4 Y, U, V, A\n 'I444A',\n 'I444AP10',\n 'I444AP12',\n // 4:2:0 Y, UV\n 'NV12',\n // 4:4:4 RGBA\n 'RGBA',\n // 4:4:4 RGBX (opaque)\n 'RGBX',\n // 4:4:4 BGRA\n 'BGRA',\n // 4:4:4 BGRX (opaque)\n 'BGRX',\n];\nconst VIDEO_SAMPLE_PIXEL_FORMATS_SET = new Set(VIDEO_SAMPLE_PIXEL_FORMATS);\n/**\n * Represents a raw, unencoded video sample (frame). Mainly used as an expressive wrapper around WebCodecs API's\n * [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame), but can also be used standalone.\n * @group Samples\n * @public\n */\nexport class VideoSample {\n /** The width of the frame in pixels. */\n get codedWidth() {\n // This is wrong, but the fix is a v2 thing\n return this.visibleRect.width;\n }\n /** The height of the frame in pixels. */\n get codedHeight() {\n // Same here\n return this.visibleRect.height;\n }\n /** The display width of the frame in pixels, after aspect ratio adjustment and rotation. */\n get displayWidth() {\n return this.rotation % 180 === 0 ? this.squarePixelWidth : this.squarePixelHeight;\n }\n /** The display height of the frame in pixels, after aspect ratio adjustment and rotation. */\n get displayHeight() {\n return this.rotation % 180 === 0 ? 
this.squarePixelHeight : this.squarePixelWidth;\n }\n /** The presentation timestamp of the frame in microseconds. */\n get microsecondTimestamp() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);\n }\n /** The duration of the frame in microseconds. */\n get microsecondDuration() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);\n }\n /**\n * Whether this sample uses a pixel format that can hold transparency data. Note that this doesn't necessarily mean\n * that the sample is transparent.\n */\n get hasAlpha() {\n return this.format && this.format.includes('A');\n }\n constructor(data, init) {\n /** @internal */\n this._closed = false;\n if (data instanceof ArrayBuffer\n || (typeof SharedArrayBuffer !== 'undefined' && data instanceof SharedArrayBuffer)\n || ArrayBuffer.isView(data)) {\n if (!init || typeof init !== 'object') {\n throw new TypeError('init must be an object.');\n }\n if (init.format === undefined || !VIDEO_SAMPLE_PIXEL_FORMATS_SET.has(init.format)) {\n throw new TypeError('init.format must be one of: ' + VIDEO_SAMPLE_PIXEL_FORMATS.join(', '));\n }\n if (!Number.isInteger(init.codedWidth) || init.codedWidth <= 0) {\n throw new TypeError('init.codedWidth must be a positive integer.');\n }\n if (!Number.isInteger(init.codedHeight) || init.codedHeight <= 0) {\n throw new TypeError('init.codedHeight must be a positive integer.');\n }\n if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {\n throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');\n }\n if (!Number.isFinite(init.timestamp)) {\n throw new TypeError('init.timestamp must be a number.');\n }\n if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {\n throw new TypeError('init.duration, when provided, must be a non-negative number.');\n }\n if (init.layout !== undefined) {\n if (!Array.isArray(init.layout)) {\n throw new TypeError('init.layout, when provided, must be 
an array.');\n }\n for (const plane of init.layout) {\n if (!plane || typeof plane !== 'object' || Array.isArray(plane)) {\n throw new TypeError('Each entry in init.layout must be an object.');\n }\n if (!Number.isInteger(plane.offset) || plane.offset < 0) {\n throw new TypeError('plane.offset must be a non-negative integer.');\n }\n if (!Number.isInteger(plane.stride) || plane.stride < 0) {\n throw new TypeError('plane.stride must be a non-negative integer.');\n }\n }\n }\n if (init.visibleRect !== undefined) {\n validateRectangle(init.visibleRect, 'init.visibleRect');\n }\n if (init.displayWidth !== undefined\n && (!Number.isInteger(init.displayWidth) || init.displayWidth <= 0)) {\n throw new TypeError('init.displayWidth, when provided, must be a positive integer.');\n }\n if (init.displayHeight !== undefined\n && (!Number.isInteger(init.displayHeight) || init.displayHeight <= 0)) {\n throw new TypeError('init.displayHeight, when provided, must be a positive integer.');\n }\n if ((init.displayWidth !== undefined) !== (init.displayHeight !== undefined)) {\n throw new TypeError('init.displayWidth and init.displayHeight must be either both provided or both omitted.');\n }\n this._data = toUint8Array(data).slice(); // Copy it\n this._layout = init.layout ?? createDefaultPlaneLayout(init.format, init.codedWidth, init.codedHeight);\n this.format = init.format;\n this.rotation = init.rotation ?? 0;\n this.timestamp = init.timestamp;\n this.duration = init.duration ?? 0;\n this.colorSpace = new VideoSampleColorSpace(init.colorSpace);\n this.visibleRect = {\n left: init.visibleRect?.left ?? 0,\n top: init.visibleRect?.top ?? 0,\n width: init.visibleRect?.width ?? init.codedWidth,\n height: init.visibleRect?.height ?? init.codedHeight,\n };\n if (init.displayWidth !== undefined) {\n this.squarePixelWidth = this.rotation % 180 === 0 ? init.displayWidth : init.displayHeight;\n this.squarePixelHeight = this.rotation % 180 === 0 ? 
init.displayHeight : init.displayWidth;\n }\n else {\n this.squarePixelWidth = this.codedWidth;\n this.squarePixelHeight = this.codedHeight;\n }\n }\n else if (typeof VideoFrame !== 'undefined' && data instanceof VideoFrame) {\n if (init?.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {\n throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');\n }\n if (init?.timestamp !== undefined && !Number.isFinite(init?.timestamp)) {\n throw new TypeError('init.timestamp, when provided, must be a number.');\n }\n if (init?.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {\n throw new TypeError('init.duration, when provided, must be a non-negative number.');\n }\n if (init?.visibleRect !== undefined) {\n validateRectangle(init.visibleRect, 'init.visibleRect');\n }\n this._data = data;\n this._layout = null;\n this.format = data.format;\n this.visibleRect = {\n left: data.visibleRect?.x ?? 0,\n top: data.visibleRect?.y ?? 0,\n width: data.visibleRect?.width ?? data.codedWidth,\n height: data.visibleRect?.height ?? data.codedHeight,\n };\n // The VideoFrame's rotation is ignored here. It's still a new field, and I'm not sure of any application\n // where the browser makes use of it. If a case gets found, I'll add it.\n this.rotation = init?.rotation ?? 0;\n // Assuming no innate VideoFrame rotation here\n this.squarePixelWidth = data.displayWidth;\n this.squarePixelHeight = data.displayHeight;\n this.timestamp = init?.timestamp ?? data.timestamp / 1e6;\n this.duration = init?.duration ?? (data.duration ?? 
0) / 1e6;\n this.colorSpace = new VideoSampleColorSpace(data.colorSpace);\n }\n else if ((typeof HTMLImageElement !== 'undefined' && data instanceof HTMLImageElement)\n || (typeof SVGImageElement !== 'undefined' && data instanceof SVGImageElement)\n || (typeof ImageBitmap !== 'undefined' && data instanceof ImageBitmap)\n || (typeof HTMLVideoElement !== 'undefined' && data instanceof HTMLVideoElement)\n || (typeof HTMLCanvasElement !== 'undefined' && data instanceof HTMLCanvasElement)\n || (typeof OffscreenCanvas !== 'undefined' && data instanceof OffscreenCanvas)) {\n if (!init || typeof init !== 'object') {\n throw new TypeError('init must be an object.');\n }\n if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {\n throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');\n }\n if (!Number.isFinite(init.timestamp)) {\n throw new TypeError('init.timestamp must be a number.');\n }\n if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {\n throw new TypeError('init.duration, when provided, must be a non-negative number.');\n }\n if (typeof VideoFrame !== 'undefined') {\n return new VideoSample(new VideoFrame(data, {\n timestamp: Math.trunc(init.timestamp * SECOND_TO_MICROSECOND_FACTOR),\n // Drag 0 to undefined\n duration: Math.trunc((init.duration ?? 
0) * SECOND_TO_MICROSECOND_FACTOR) || undefined,\n }), init);\n }\n let width = 0;\n let height = 0;\n // Determine the dimensions of the thing\n if ('naturalWidth' in data) {\n width = data.naturalWidth;\n height = data.naturalHeight;\n }\n else if ('videoWidth' in data) {\n width = data.videoWidth;\n height = data.videoHeight;\n }\n else if ('width' in data) {\n width = Number(data.width);\n height = Number(data.height);\n }\n if (!width || !height) {\n throw new TypeError('Could not determine dimensions.');\n }\n const canvas = new OffscreenCanvas(width, height);\n const context = canvas.getContext('2d', {\n alpha: isFirefox(), // Firefox has VideoFrame glitches with opaque canvases\n willReadFrequently: true,\n });\n assert(context);\n // Draw it to a canvas\n context.drawImage(data, 0, 0);\n this._data = canvas;\n this._layout = null;\n this.format = 'RGBX';\n this.visibleRect = { left: 0, top: 0, width, height };\n this.squarePixelWidth = width;\n this.squarePixelHeight = height;\n this.rotation = init.rotation ?? 0;\n this.timestamp = init.timestamp;\n this.duration = init.duration ?? 0;\n this.colorSpace = new VideoSampleColorSpace({\n matrix: 'rgb',\n primaries: 'bt709',\n transfer: 'iec61966-2-1',\n fullRange: true,\n });\n }\n else {\n throw new TypeError('Invalid data type: Must be a BufferSource or CanvasImageSource.');\n }\n this.pixelAspectRatio = simplifyRational({\n num: this.squarePixelWidth * this.codedHeight,\n den: this.squarePixelHeight * this.codedWidth,\n });\n finalizationRegistry?.register(this, { type: 'video', data: this._data }, this);\n }\n /** Clones this video sample. 
*/\n clone() {\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n assert(this._data !== null);\n if (isVideoFrame(this._data)) {\n return new VideoSample(this._data.clone(), {\n timestamp: this.timestamp,\n duration: this.duration,\n rotation: this.rotation,\n });\n }\n else if (this._data instanceof Uint8Array) {\n assert(this._layout);\n return new VideoSample(this._data, {\n format: this.format,\n layout: this._layout,\n codedWidth: this.codedWidth,\n codedHeight: this.codedHeight,\n timestamp: this.timestamp,\n duration: this.duration,\n colorSpace: this.colorSpace,\n rotation: this.rotation,\n visibleRect: this.visibleRect,\n displayWidth: this.displayWidth,\n displayHeight: this.displayHeight,\n });\n }\n else {\n return new VideoSample(this._data, {\n format: this.format,\n codedWidth: this.codedWidth,\n codedHeight: this.codedHeight,\n timestamp: this.timestamp,\n duration: this.duration,\n colorSpace: this.colorSpace,\n rotation: this.rotation,\n visibleRect: this.visibleRect,\n displayWidth: this.displayWidth,\n displayHeight: this.displayHeight,\n });\n }\n }\n /**\n * Closes this video sample, releasing held resources. Video samples should be closed as soon as they are not\n * needed anymore.\n */\n close() {\n if (this._closed) {\n return;\n }\n finalizationRegistry?.unregister(this);\n if (isVideoFrame(this._data)) {\n this._data.close();\n }\n else {\n this._data = null; // GC that shit\n }\n this._closed = true;\n }\n /**\n * Returns the number of bytes required to hold this video sample's pixel data. Throws if `format` is `null`.\n */\n allocationSize(options = {}) {\n validateVideoFrameCopyToOptions(options);\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n if (this.format === null) {\n // https://github.com/Vanilagy/mediabunny/issues/267\n // https://github.com/w3c/webcodecs/issues/920\n throw new Error('Cannot get allocation size when format is null. 
Sorry!');\n }\n assert(this._data !== null);\n if (!isVideoFrame(this._data)) {\n if (options.colorSpace\n || (options.format && options.format !== this.format)\n || options.layout\n || options.rect) {\n // Temporarily convert to VideoFrame to get it done\n // TODO: Compute this directly without needing to go through VideoFrame\n const videoFrame = this.toVideoFrame();\n const size = videoFrame.allocationSize(options);\n videoFrame.close();\n return size;\n }\n }\n if (isVideoFrame(this._data)) {\n return this._data.allocationSize(options);\n }\n else if (this._data instanceof Uint8Array) {\n return this._data.byteLength;\n }\n else {\n return this.codedWidth * this.codedHeight * 4; // RGBX\n }\n }\n /**\n * Copies this video sample's pixel data to an ArrayBuffer or ArrayBufferView. Throws if `format` is `null`.\n * @returns The byte layout of the planes of the copied data.\n */\n async copyTo(destination, options = {}) {\n if (!isAllowSharedBufferSource(destination)) {\n throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');\n }\n validateVideoFrameCopyToOptions(options);\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n if (this.format === null) {\n throw new Error('Cannot copy video sample data when format is null. 
Sorry!');\n }\n assert(this._data !== null);\n if (!isVideoFrame(this._data)) {\n if (options.colorSpace\n || (options.format && options.format !== this.format)\n || options.layout\n || options.rect) {\n // Temporarily convert to VideoFrame to get it done\n // TODO: Do this directly without needing to go through VideoFrame\n const videoFrame = this.toVideoFrame();\n const layout = await videoFrame.copyTo(destination, options);\n videoFrame.close();\n return layout;\n }\n }\n if (isVideoFrame(this._data)) {\n return this._data.copyTo(destination, options);\n }\n else if (this._data instanceof Uint8Array) {\n assert(this._layout);\n const dest = toUint8Array(destination);\n dest.set(this._data);\n return this._layout;\n }\n else {\n const canvas = this._data;\n const context = canvas.getContext('2d');\n assert(context);\n const imageData = context.getImageData(0, 0, this.codedWidth, this.codedHeight);\n const dest = toUint8Array(destination);\n dest.set(imageData.data);\n return [{\n offset: 0,\n stride: 4 * this.codedWidth,\n }];\n }\n }\n /**\n * Converts this video sample to a VideoFrame for use with the WebCodecs API. 
The VideoFrame returned by this\n * method *must* be closed separately from this video sample.\n */\n toVideoFrame() {\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n assert(this._data !== null);\n if (isVideoFrame(this._data)) {\n return new VideoFrame(this._data, {\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration || undefined, // Drag 0 duration to undefined, glitches some codecs\n });\n }\n else if (this._data instanceof Uint8Array) {\n return new VideoFrame(this._data, {\n format: this.format,\n codedWidth: this.codedWidth,\n codedHeight: this.codedHeight,\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration || undefined,\n colorSpace: this.colorSpace,\n });\n }\n else {\n return new VideoFrame(this._data, {\n timestamp: this.microsecondTimestamp,\n duration: this.microsecondDuration || undefined,\n });\n }\n }\n draw(context, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) {\n let sx = 0;\n let sy = 0;\n let sWidth = this.displayWidth;\n let sHeight = this.displayHeight;\n let dx = 0;\n let dy = 0;\n let dWidth = this.displayWidth;\n let dHeight = this.displayHeight;\n if (arg5 !== undefined) {\n sx = arg1;\n sy = arg2;\n sWidth = arg3;\n sHeight = arg4;\n dx = arg5;\n dy = arg6;\n if (arg7 !== undefined) {\n dWidth = arg7;\n dHeight = arg8;\n }\n else {\n dWidth = sWidth;\n dHeight = sHeight;\n }\n }\n else {\n dx = arg1;\n dy = arg2;\n if (arg3 !== undefined) {\n dWidth = arg3;\n dHeight = arg4;\n }\n }\n if (!((typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)\n || (typeof OffscreenCanvasRenderingContext2D !== 'undefined'\n && context instanceof OffscreenCanvasRenderingContext2D))) {\n throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');\n }\n if (!Number.isFinite(sx)) {\n throw new TypeError('sx must be a number.');\n }\n if (!Number.isFinite(sy)) {\n throw new TypeError('sy must be a 
number.');\n }\n if (!Number.isFinite(sWidth) || sWidth < 0) {\n throw new TypeError('sWidth must be a non-negative number.');\n }\n if (!Number.isFinite(sHeight) || sHeight < 0) {\n throw new TypeError('sHeight must be a non-negative number.');\n }\n if (!Number.isFinite(dx)) {\n throw new TypeError('dx must be a number.');\n }\n if (!Number.isFinite(dy)) {\n throw new TypeError('dy must be a number.');\n }\n if (!Number.isFinite(dWidth) || dWidth < 0) {\n throw new TypeError('dWidth must be a non-negative number.');\n }\n if (!Number.isFinite(dHeight) || dHeight < 0) {\n throw new TypeError('dHeight must be a non-negative number.');\n }\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n ({ sx, sy, sWidth, sHeight } = this._rotateSourceRegion(sx, sy, sWidth, sHeight, this.rotation));\n const source = this.toCanvasImageSource();\n context.save();\n const centerX = dx + dWidth / 2;\n const centerY = dy + dHeight / 2;\n context.translate(centerX, centerY);\n context.rotate(this.rotation * Math.PI / 180);\n const aspectRatioChange = this.rotation % 180 === 0 ? 
1 : dWidth / dHeight;\n // Scale to compensate for aspect ratio changes when rotated\n context.scale(1 / aspectRatioChange, aspectRatioChange);\n context.drawImage(source, sx, sy, sWidth, sHeight, -dWidth / 2, -dHeight / 2, dWidth, dHeight);\n context.restore();\n }\n /**\n * Draws the sample in the middle of the canvas corresponding to the context with the specified fit behavior.\n */\n drawWithFit(context, options) {\n if (!((typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)\n || (typeof OffscreenCanvasRenderingContext2D !== 'undefined'\n && context instanceof OffscreenCanvasRenderingContext2D))) {\n throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (!['fill', 'contain', 'cover'].includes(options.fit)) {\n throw new TypeError('options.fit must be \\'fill\\', \\'contain\\', or \\'cover\\'.');\n }\n if (options.rotation !== undefined && ![0, 90, 180, 270].includes(options.rotation)) {\n throw new TypeError('options.rotation, when provided, must be 0, 90, 180, or 270.');\n }\n if (options.crop !== undefined) {\n validateCropRectangle(options.crop, 'options.');\n }\n const canvasWidth = context.canvas.width;\n const canvasHeight = context.canvas.height;\n const rotation = options.rotation ?? this.rotation;\n const [rotatedWidth, rotatedHeight] = rotation % 180 === 0\n ? [this.squarePixelWidth, this.squarePixelHeight]\n : [this.squarePixelHeight, this.squarePixelWidth];\n if (options.crop) {\n clampCropRectangle(options.crop, rotatedWidth, rotatedHeight);\n }\n // These variables specify where the final sample will be drawn on the canvas\n let dx;\n let dy;\n let newWidth;\n let newHeight;\n const { sx, sy, sWidth, sHeight } = this._rotateSourceRegion(options.crop?.left ?? 0, options.crop?.top ?? 0, options.crop?.width ?? 
rotatedWidth, options.crop?.height ?? rotatedHeight, rotation);\n if (options.fit === 'fill') {\n dx = 0;\n dy = 0;\n newWidth = canvasWidth;\n newHeight = canvasHeight;\n }\n else {\n const [sampleWidth, sampleHeight] = options.crop\n ? [options.crop.width, options.crop.height]\n : [rotatedWidth, rotatedHeight];\n const scale = options.fit === 'contain'\n ? Math.min(canvasWidth / sampleWidth, canvasHeight / sampleHeight)\n : Math.max(canvasWidth / sampleWidth, canvasHeight / sampleHeight);\n newWidth = sampleWidth * scale;\n newHeight = sampleHeight * scale;\n dx = (canvasWidth - newWidth) / 2;\n dy = (canvasHeight - newHeight) / 2;\n }\n context.save();\n const aspectRatioChange = rotation % 180 === 0 ? 1 : newWidth / newHeight;\n context.translate(canvasWidth / 2, canvasHeight / 2);\n context.rotate(rotation * Math.PI / 180);\n // This aspect ratio compensation is done so that we can draw the sample with the intended dimensions and\n // don't need to think about how those dimensions change after the rotation\n context.scale(1 / aspectRatioChange, aspectRatioChange);\n context.translate(-canvasWidth / 2, -canvasHeight / 2);\n // Important that we don't use .draw() here since that would take rotation into account, but we wanna handle it\n // ourselves here\n context.drawImage(this.toCanvasImageSource(), sx, sy, sWidth, sHeight, dx, dy, newWidth, newHeight);\n context.restore();\n }\n /** @internal */\n _rotateSourceRegion(sx, sy, sWidth, sHeight, rotation) {\n // The provided sx,sy,sWidth,sHeight refer to the final rotated image, but that's not actually how the image is\n // stored. 
Therefore, we must map these back onto the original, pre-rotation image.\n if (rotation === 90) {\n [sx, sy, sWidth, sHeight] = [\n sy,\n this.squarePixelHeight - sx - sWidth,\n sHeight,\n sWidth,\n ];\n }\n else if (rotation === 180) {\n [sx, sy] = [\n this.squarePixelWidth - sx - sWidth,\n this.squarePixelHeight - sy - sHeight,\n ];\n }\n else if (rotation === 270) {\n [sx, sy, sWidth, sHeight] = [\n this.squarePixelWidth - sy - sHeight,\n sx,\n sHeight,\n sWidth,\n ];\n }\n return { sx, sy, sWidth, sHeight };\n }\n /**\n * Converts this video sample to a\n * [`CanvasImageSource`](https://udn.realityripple.com/docs/Web/API/CanvasImageSource) for drawing to a canvas.\n *\n * You must use the value returned by this method immediately, as any VideoFrame created internally will\n * automatically be closed in the next microtask.\n */\n toCanvasImageSource() {\n if (this._closed) {\n throw new Error('VideoSample is closed.');\n }\n assert(this._data !== null);\n if (this._data instanceof Uint8Array) {\n // Requires VideoFrame to be defined\n const videoFrame = this.toVideoFrame();\n queueMicrotask(() => videoFrame.close()); // Let's automatically close the frame in the next microtask\n return videoFrame;\n }\n else {\n return this._data;\n }\n }\n /** Sets the rotation metadata of this video sample. */\n setRotation(newRotation) {\n if (![0, 90, 180, 270].includes(newRotation)) {\n throw new TypeError('newRotation must be 0, 90, 180, or 270.');\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion\n this.rotation = newRotation;\n }\n /** Sets the presentation timestamp of this video sample, in seconds. */\n setTimestamp(newTimestamp) {\n if (!Number.isFinite(newTimestamp)) {\n throw new TypeError('newTimestamp must be a number.');\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion\n this.timestamp = newTimestamp;\n }\n /** Sets the duration of this video sample, in seconds. 
*/\n setDuration(newDuration) {\n if (!Number.isFinite(newDuration) || newDuration < 0) {\n throw new TypeError('newDuration must be a non-negative number.');\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion\n this.duration = newDuration;\n }\n /** Calls `.close()`. */\n [Symbol.dispose]() {\n this.close();\n }\n}\n/**\n * Describes the color space of a {@link VideoSample}. Corresponds to the WebCodecs API's VideoColorSpace.\n * @group Samples\n * @public\n */\nexport class VideoSampleColorSpace {\n /** Creates a new VideoSampleColorSpace. */\n constructor(init) {\n if (init !== undefined) {\n if (!init || typeof init !== 'object') {\n throw new TypeError('init.colorSpace, when provided, must be an object.');\n }\n const primariesValues = Object.keys(COLOR_PRIMARIES_MAP);\n if (init.primaries != null && !primariesValues.includes(init.primaries)) {\n throw new TypeError(`init.colorSpace.primaries, when provided, must be one of ${primariesValues.join(', ')}.`);\n }\n const transferValues = Object.keys(TRANSFER_CHARACTERISTICS_MAP);\n if (init.transfer != null && !transferValues.includes(init.transfer)) {\n throw new TypeError(`init.colorSpace.transfer, when provided, must be one of ${transferValues.join(', ')}.`);\n }\n const matrixValues = Object.keys(MATRIX_COEFFICIENTS_MAP);\n if (init.matrix != null && !matrixValues.includes(init.matrix)) {\n throw new TypeError(`init.colorSpace.matrix, when provided, must be one of ${matrixValues.join(', ')}.`);\n }\n if (init.fullRange != null && typeof init.fullRange !== 'boolean') {\n throw new TypeError('init.colorSpace.fullRange, when provided, must be a boolean.');\n }\n }\n this.primaries = init?.primaries ?? null;\n this.transfer = init?.transfer ?? null;\n this.matrix = init?.matrix ?? null;\n this.fullRange = init?.fullRange ?? null;\n }\n /** Serializes the color space to a JSON object. 
*/\n toJSON() {\n return {\n primaries: this.primaries,\n transfer: this.transfer,\n matrix: this.matrix,\n fullRange: this.fullRange,\n };\n }\n}\nconst isVideoFrame = (x) => {\n return typeof VideoFrame !== 'undefined' && x instanceof VideoFrame;\n};\nexport const clampCropRectangle = (crop, outerWidth, outerHeight) => {\n crop.left = Math.min(crop.left, outerWidth);\n crop.top = Math.min(crop.top, outerHeight);\n crop.width = Math.min(crop.width, outerWidth - crop.left);\n crop.height = Math.min(crop.height, outerHeight - crop.top);\n assert(crop.width >= 0);\n assert(crop.height >= 0);\n};\nexport const validateCropRectangle = (crop, prefix) => {\n if (!crop || typeof crop !== 'object') {\n throw new TypeError(prefix + 'crop, when provided, must be an object.');\n }\n if (!Number.isInteger(crop.left) || crop.left < 0) {\n throw new TypeError(prefix + 'crop.left must be a non-negative integer.');\n }\n if (!Number.isInteger(crop.top) || crop.top < 0) {\n throw new TypeError(prefix + 'crop.top must be a non-negative integer.');\n }\n if (!Number.isInteger(crop.width) || crop.width < 0) {\n throw new TypeError(prefix + 'crop.width must be a non-negative integer.');\n }\n if (!Number.isInteger(crop.height) || crop.height < 0) {\n throw new TypeError(prefix + 'crop.height must be a non-negative integer.');\n }\n};\nconst validateVideoFrameCopyToOptions = (options) => {\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.colorSpace !== undefined && !['display-p3', 'srgb'].includes(options.colorSpace)) {\n throw new TypeError('options.colorSpace, when provided, must be \\'display-p3\\' or \\'srgb\\'.');\n }\n if (options.format !== undefined && typeof options.format !== 'string') {\n throw new TypeError('options.format, when provided, must be a string.');\n }\n if (options.layout !== undefined) {\n if (!Array.isArray(options.layout)) {\n throw new TypeError('options.layout, when provided, must be an 
array.');\n }\n for (const plane of options.layout) {\n if (!plane || typeof plane !== 'object') {\n throw new TypeError('Each entry in options.layout must be an object.');\n }\n if (!Number.isInteger(plane.offset) || plane.offset < 0) {\n throw new TypeError('plane.offset must be a non-negative integer.');\n }\n if (!Number.isInteger(plane.stride) || plane.stride < 0) {\n throw new TypeError('plane.stride must be a non-negative integer.');\n }\n }\n }\n if (options.rect !== undefined) {\n if (!options.rect || typeof options.rect !== 'object') {\n throw new TypeError('options.rect, when provided, must be an object.');\n }\n if (options.rect.x !== undefined && (!Number.isInteger(options.rect.x) || options.rect.x < 0)) {\n throw new TypeError('options.rect.x, when provided, must be a non-negative integer.');\n }\n if (options.rect.y !== undefined && (!Number.isInteger(options.rect.y) || options.rect.y < 0)) {\n throw new TypeError('options.rect.y, when provided, must be a non-negative integer.');\n }\n if (options.rect.width !== undefined && (!Number.isInteger(options.rect.width) || options.rect.width < 0)) {\n throw new TypeError('options.rect.width, when provided, must be a non-negative integer.');\n }\n if (options.rect.height !== undefined && (!Number.isInteger(options.rect.height) || options.rect.height < 0)) {\n throw new TypeError('options.rect.height, when provided, must be a non-negative integer.');\n }\n }\n};\n/** Implements logic from WebCodecs § 9.4.6 \"Compute Layout and Allocation Size\" */\nconst createDefaultPlaneLayout = (format, codedWidth, codedHeight) => {\n const planes = getPlaneConfigs(format);\n const layouts = [];\n let currentOffset = 0;\n for (const plane of planes) {\n // Per § 9.8, dimensions are usually \"rounded up to the nearest integer\".\n const planeWidth = Math.ceil(codedWidth / plane.widthDivisor);\n const planeHeight = Math.ceil(codedHeight / plane.heightDivisor);\n const stride = planeWidth * plane.sampleBytes;\n // Tight 
packing\n const planeSize = stride * planeHeight;\n layouts.push({\n offset: currentOffset,\n stride: stride,\n });\n currentOffset += planeSize;\n }\n return layouts;\n};\n/** Helper to retrieve plane configurations based on WebCodecs § 9.8 Pixel Format definitions. */\nconst getPlaneConfigs = (format) => {\n // Helper for standard YUV planes\n const yuv = (yBytes, uvBytes, subX, subY, hasAlpha) => {\n const configs = [\n { sampleBytes: yBytes, widthDivisor: 1, heightDivisor: 1 },\n { sampleBytes: uvBytes, widthDivisor: subX, heightDivisor: subY },\n { sampleBytes: uvBytes, widthDivisor: subX, heightDivisor: subY },\n ];\n if (hasAlpha) {\n // Match luma dimensions\n configs.push({ sampleBytes: yBytes, widthDivisor: 1, heightDivisor: 1 });\n }\n return configs;\n };\n switch (format) {\n case 'I420':\n return yuv(1, 1, 2, 2, false);\n case 'I420P10':\n case 'I420P12':\n return yuv(2, 2, 2, 2, false);\n case 'I420A':\n return yuv(1, 1, 2, 2, true);\n case 'I420AP10':\n case 'I420AP12':\n return yuv(2, 2, 2, 2, true);\n case 'I422':\n return yuv(1, 1, 2, 1, false);\n case 'I422P10':\n case 'I422P12':\n return yuv(2, 2, 2, 1, false);\n case 'I422A':\n return yuv(1, 1, 2, 1, true);\n case 'I422AP10':\n case 'I422AP12':\n return yuv(2, 2, 2, 1, true);\n case 'I444':\n return yuv(1, 1, 1, 1, false);\n case 'I444P10':\n case 'I444P12':\n return yuv(2, 2, 1, 1, false);\n case 'I444A':\n return yuv(1, 1, 1, 1, true);\n case 'I444AP10':\n case 'I444AP12':\n return yuv(2, 2, 1, 1, true);\n case 'NV12':\n return [\n { sampleBytes: 1, widthDivisor: 1, heightDivisor: 1 },\n { sampleBytes: 2, widthDivisor: 2, heightDivisor: 2 }, // Interleaved U and V\n ];\n case 'RGBA':\n case 'RGBX':\n case 'BGRA':\n case 'BGRX':\n return [\n { sampleBytes: 4, widthDivisor: 1, heightDivisor: 1 },\n ];\n default:\n assertNever(format);\n assert(false);\n }\n};\nconst AUDIO_SAMPLE_FORMATS = new Set(['f32', 'f32-planar', 's16', 's16-planar', 's32', 's32-planar', 'u8', 'u8-planar']);\n/**\n * 
Represents a raw, unencoded audio sample. Mainly used as an expressive wrapper around WebCodecs API's\n * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData), but can also be used standalone.\n * @group Samples\n * @public\n */\nexport class AudioSample {\n /** The presentation timestamp of the sample in microseconds. */\n get microsecondTimestamp() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);\n }\n /** The duration of the sample in microseconds. */\n get microsecondDuration() {\n return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);\n }\n /**\n * Creates a new {@link AudioSample}, either from an existing\n * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData) or from raw bytes specified in\n * {@link AudioSampleInit}.\n */\n constructor(init) {\n /** @internal */\n this._closed = false;\n if (isAudioData(init)) {\n if (init.format === null) {\n throw new TypeError('AudioData with null format is not supported.');\n }\n this._data = init;\n this.format = init.format;\n this.sampleRate = init.sampleRate;\n this.numberOfFrames = init.numberOfFrames;\n this.numberOfChannels = init.numberOfChannels;\n this.timestamp = init.timestamp / 1e6;\n this.duration = init.numberOfFrames / init.sampleRate;\n }\n else {\n if (!init || typeof init !== 'object') {\n throw new TypeError('Invalid AudioDataInit: must be an object.');\n }\n if (!AUDIO_SAMPLE_FORMATS.has(init.format)) {\n throw new TypeError('Invalid AudioDataInit: invalid format.');\n }\n if (!Number.isFinite(init.sampleRate) || init.sampleRate <= 0) {\n throw new TypeError('Invalid AudioDataInit: sampleRate must be > 0.');\n }\n if (!Number.isInteger(init.numberOfChannels) || init.numberOfChannels === 0) {\n throw new TypeError('Invalid AudioDataInit: numberOfChannels must be an integer > 0.');\n }\n if (!Number.isFinite(init?.timestamp)) {\n throw new TypeError('init.timestamp must be a number.');\n }\n const numberOfFrames = 
init.data.byteLength / (getBytesPerSample(init.format) * init.numberOfChannels);\n if (!Number.isInteger(numberOfFrames)) {\n throw new TypeError('Invalid AudioDataInit: data size is not a multiple of frame size.');\n }\n this.format = init.format;\n this.sampleRate = init.sampleRate;\n this.numberOfFrames = numberOfFrames;\n this.numberOfChannels = init.numberOfChannels;\n this.timestamp = init.timestamp;\n this.duration = numberOfFrames / init.sampleRate;\n let dataBuffer;\n if (init.data instanceof ArrayBuffer) {\n dataBuffer = new Uint8Array(init.data);\n }\n else if (ArrayBuffer.isView(init.data)) {\n dataBuffer = new Uint8Array(init.data.buffer, init.data.byteOffset, init.data.byteLength);\n }\n else {\n throw new TypeError('Invalid AudioDataInit: data is not a BufferSource.');\n }\n const expectedSize = this.numberOfFrames * this.numberOfChannels * getBytesPerSample(this.format);\n if (dataBuffer.byteLength < expectedSize) {\n throw new TypeError('Invalid AudioDataInit: insufficient data size.');\n }\n this._data = dataBuffer;\n }\n finalizationRegistry?.register(this, { type: 'audio', data: this._data }, this);\n }\n /** Returns the number of bytes required to hold the audio sample's data as specified by the given options. 
*/\n allocationSize(options) {\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {\n throw new TypeError('planeIndex must be a non-negative integer.');\n }\n if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {\n throw new TypeError('Invalid format.');\n }\n if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {\n throw new TypeError('frameOffset must be a non-negative integer.');\n }\n if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {\n throw new TypeError('frameCount must be a non-negative integer.');\n }\n if (this._closed) {\n throw new Error('AudioSample is closed.');\n }\n const destFormat = options.format ?? this.format;\n const frameOffset = options.frameOffset ?? 0;\n if (frameOffset >= this.numberOfFrames) {\n throw new RangeError('frameOffset out of range');\n }\n const copyFrameCount = options.frameCount !== undefined ? options.frameCount : (this.numberOfFrames - frameOffset);\n if (copyFrameCount > (this.numberOfFrames - frameOffset)) {\n throw new RangeError('frameCount out of range');\n }\n const bytesPerSample = getBytesPerSample(destFormat);\n const isPlanar = formatIsPlanar(destFormat);\n if (isPlanar && options.planeIndex >= this.numberOfChannels) {\n throw new RangeError('planeIndex out of range');\n }\n if (!isPlanar && options.planeIndex !== 0) {\n throw new RangeError('planeIndex out of range');\n }\n const elementCount = isPlanar ? copyFrameCount : copyFrameCount * this.numberOfChannels;\n return elementCount * bytesPerSample;\n }\n /** Copies the audio sample's data to an ArrayBuffer or ArrayBufferView as specified by the given options. 
*/\n copyTo(destination, options) {\n if (!isAllowSharedBufferSource(destination)) {\n throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {\n throw new TypeError('planeIndex must be a non-negative integer.');\n }\n if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {\n throw new TypeError('Invalid format.');\n }\n if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {\n throw new TypeError('frameOffset must be a non-negative integer.');\n }\n if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {\n throw new TypeError('frameCount must be a non-negative integer.');\n }\n if (this._closed) {\n throw new Error('AudioSample is closed.');\n }\n const { planeIndex, format, frameCount: optFrameCount, frameOffset: optFrameOffset } = options;\n const srcFormat = this.format;\n const destFormat = format ?? this.format;\n if (!destFormat)\n throw new Error('Destination format not determined');\n const numFrames = this.numberOfFrames;\n const numChannels = this.numberOfChannels;\n const frameOffset = optFrameOffset ?? 0;\n if (frameOffset >= numFrames) {\n throw new RangeError('frameOffset out of range');\n }\n const copyFrameCount = optFrameCount !== undefined ? 
optFrameCount : (numFrames - frameOffset);\n if (copyFrameCount > (numFrames - frameOffset)) {\n throw new RangeError('frameCount out of range');\n }\n const destBytesPerSample = getBytesPerSample(destFormat);\n const destIsPlanar = formatIsPlanar(destFormat);\n if (destIsPlanar && planeIndex >= numChannels) {\n throw new RangeError('planeIndex out of range');\n }\n if (!destIsPlanar && planeIndex !== 0) {\n throw new RangeError('planeIndex out of range');\n }\n const destElementCount = destIsPlanar ? copyFrameCount : copyFrameCount * numChannels;\n const requiredSize = destElementCount * destBytesPerSample;\n if (destination.byteLength < requiredSize) {\n throw new RangeError('Destination buffer is too small');\n }\n const destView = toDataView(destination);\n const writeFn = getWriteFunction(destFormat);\n if (isAudioData(this._data)) {\n if (isWebKit() && numChannels > 2 && destFormat !== srcFormat) {\n // WebKit bug workaround\n doAudioDataCopyToWebKitWorkaround(this._data, destView, srcFormat, destFormat, numChannels, planeIndex, frameOffset, copyFrameCount);\n }\n else {\n // Per spec, only f32-planar conversion must be supported, but in practice, all browsers support all\n // destination formats, so let's just delegate here:\n this._data.copyTo(destination, {\n planeIndex,\n frameOffset,\n frameCount: copyFrameCount,\n format: destFormat,\n });\n }\n }\n else {\n const uint8Data = this._data;\n const srcView = toDataView(uint8Data);\n const readFn = getReadFunction(srcFormat);\n const srcBytesPerSample = getBytesPerSample(srcFormat);\n const srcIsPlanar = formatIsPlanar(srcFormat);\n for (let i = 0; i < copyFrameCount; i++) {\n if (destIsPlanar) {\n const destOffset = i * destBytesPerSample;\n let srcOffset;\n if (srcIsPlanar) {\n srcOffset = (planeIndex * numFrames + (i + frameOffset)) * srcBytesPerSample;\n }\n else {\n srcOffset = (((i + frameOffset) * numChannels) + planeIndex) * srcBytesPerSample;\n }\n const normalized = readFn(srcView, srcOffset);\n 
writeFn(destView, destOffset, normalized);\n }\n else {\n for (let ch = 0; ch < numChannels; ch++) {\n const destIndex = i * numChannels + ch;\n const destOffset = destIndex * destBytesPerSample;\n let srcOffset;\n if (srcIsPlanar) {\n srcOffset = (ch * numFrames + (i + frameOffset)) * srcBytesPerSample;\n }\n else {\n srcOffset = (((i + frameOffset) * numChannels) + ch) * srcBytesPerSample;\n }\n const normalized = readFn(srcView, srcOffset);\n writeFn(destView, destOffset, normalized);\n }\n }\n }\n }\n }\n /** Clones this audio sample. */\n clone() {\n if (this._closed) {\n throw new Error('AudioSample is closed.');\n }\n if (isAudioData(this._data)) {\n const sample = new AudioSample(this._data.clone());\n sample.setTimestamp(this.timestamp); // Make sure the timestamp is precise (beyond microsecond accuracy)\n return sample;\n }\n else {\n return new AudioSample({\n format: this.format,\n sampleRate: this.sampleRate,\n numberOfFrames: this.numberOfFrames,\n numberOfChannels: this.numberOfChannels,\n timestamp: this.timestamp,\n data: this._data,\n });\n }\n }\n /**\n * Closes this audio sample, releasing held resources. Audio samples should be closed as soon as they are not\n * needed anymore.\n */\n close() {\n if (this._closed) {\n return;\n }\n finalizationRegistry?.unregister(this);\n if (isAudioData(this._data)) {\n this._data.close();\n }\n else {\n this._data = new Uint8Array(0);\n }\n this._closed = true;\n }\n /**\n * Converts this audio sample to an AudioData for use with the WebCodecs API. 
The AudioData returned by this\n * method *must* be closed separately from this audio sample.\n */\n toAudioData() {\n if (this._closed) {\n throw new Error('AudioSample is closed.');\n }\n if (isAudioData(this._data)) {\n if (this._data.timestamp === this.microsecondTimestamp) {\n // Timestamp matches, let's just return the data (but cloned)\n return this._data.clone();\n }\n else {\n // It's impossible to simply change an AudioData's timestamp, so we'll need to create a new one\n if (formatIsPlanar(this.format)) {\n const size = this.allocationSize({ planeIndex: 0, format: this.format });\n const data = new ArrayBuffer(size * this.numberOfChannels);\n // We gotta read out each plane individually\n for (let i = 0; i < this.numberOfChannels; i++) {\n this.copyTo(new Uint8Array(data, i * size, size), { planeIndex: i, format: this.format });\n }\n return new AudioData({\n format: this.format,\n sampleRate: this.sampleRate,\n numberOfFrames: this.numberOfFrames,\n numberOfChannels: this.numberOfChannels,\n timestamp: this.microsecondTimestamp,\n data,\n });\n }\n else {\n const data = new ArrayBuffer(this.allocationSize({ planeIndex: 0, format: this.format }));\n this.copyTo(data, { planeIndex: 0, format: this.format });\n return new AudioData({\n format: this.format,\n sampleRate: this.sampleRate,\n numberOfFrames: this.numberOfFrames,\n numberOfChannels: this.numberOfChannels,\n timestamp: this.microsecondTimestamp,\n data,\n });\n }\n }\n }\n else {\n return new AudioData({\n format: this.format,\n sampleRate: this.sampleRate,\n numberOfFrames: this.numberOfFrames,\n numberOfChannels: this.numberOfChannels,\n timestamp: this.microsecondTimestamp,\n data: this._data.buffer instanceof ArrayBuffer\n ? this._data.buffer\n : this._data.slice(), // In the case of SharedArrayBuffer, convert to ArrayBuffer\n });\n }\n }\n /** Convert this audio sample to an AudioBuffer for use with the Web Audio API. 
*/\n toAudioBuffer() {\n if (this._closed) {\n throw new Error('AudioSample is closed.');\n }\n const audioBuffer = new AudioBuffer({\n numberOfChannels: this.numberOfChannels,\n length: this.numberOfFrames,\n sampleRate: this.sampleRate,\n });\n const dataBytes = new Float32Array(this.allocationSize({ planeIndex: 0, format: 'f32-planar' }) / 4);\n for (let i = 0; i < this.numberOfChannels; i++) {\n this.copyTo(dataBytes, { planeIndex: i, format: 'f32-planar' });\n audioBuffer.copyToChannel(dataBytes, i);\n }\n return audioBuffer;\n }\n /** Sets the presentation timestamp of this audio sample, in seconds. */\n setTimestamp(newTimestamp) {\n if (!Number.isFinite(newTimestamp)) {\n throw new TypeError('newTimestamp must be a number.');\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion\n this.timestamp = newTimestamp;\n }\n /** Calls `.close()`. */\n [Symbol.dispose]() {\n this.close();\n }\n /** @internal */\n static *_fromAudioBuffer(audioBuffer, timestamp) {\n if (!(audioBuffer instanceof AudioBuffer)) {\n throw new TypeError('audioBuffer must be an AudioBuffer.');\n }\n const MAX_FLOAT_COUNT = 48000 * 5; // 5 seconds of mono 48 kHz audio per sample\n const numberOfChannels = audioBuffer.numberOfChannels;\n const sampleRate = audioBuffer.sampleRate;\n const totalFrames = audioBuffer.length;\n const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);\n let currentRelativeFrame = 0;\n let remainingFrames = totalFrames;\n // Create AudioSamples in a chunked fashion so we don't create huge Float32Arrays\n while (remainingFrames > 0) {\n const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);\n const chunkData = new Float32Array(numberOfChannels * framesToCopy);\n for (let channel = 0; channel < numberOfChannels; channel++) {\n audioBuffer.copyFromChannel(chunkData.subarray(channel * framesToCopy, (channel + 1) * framesToCopy), channel, currentRelativeFrame);\n }\n yield new AudioSample({\n format: 
'f32-planar',\n sampleRate,\n numberOfFrames: framesToCopy,\n numberOfChannels,\n timestamp: timestamp + currentRelativeFrame / sampleRate,\n data: chunkData,\n });\n currentRelativeFrame += framesToCopy;\n remainingFrames -= framesToCopy;\n }\n }\n /**\n * Creates AudioSamples from an AudioBuffer, starting at the given timestamp in seconds. Typically creates exactly\n * one sample, but may create multiple if the AudioBuffer is exceedingly large.\n */\n static fromAudioBuffer(audioBuffer, timestamp) {\n if (!(audioBuffer instanceof AudioBuffer)) {\n throw new TypeError('audioBuffer must be an AudioBuffer.');\n }\n const MAX_FLOAT_COUNT = 48000 * 5; // 5 seconds of mono 48 kHz audio per sample\n const numberOfChannels = audioBuffer.numberOfChannels;\n const sampleRate = audioBuffer.sampleRate;\n const totalFrames = audioBuffer.length;\n const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);\n let currentRelativeFrame = 0;\n let remainingFrames = totalFrames;\n const result = [];\n // Create AudioSamples in a chunked fashion so we don't create huge Float32Arrays\n while (remainingFrames > 0) {\n const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);\n const chunkData = new Float32Array(numberOfChannels * framesToCopy);\n for (let channel = 0; channel < numberOfChannels; channel++) {\n audioBuffer.copyFromChannel(chunkData.subarray(channel * framesToCopy, (channel + 1) * framesToCopy), channel, currentRelativeFrame);\n }\n const audioSample = new AudioSample({\n format: 'f32-planar',\n sampleRate,\n numberOfFrames: framesToCopy,\n numberOfChannels,\n timestamp: timestamp + currentRelativeFrame / sampleRate,\n data: chunkData,\n });\n result.push(audioSample);\n currentRelativeFrame += framesToCopy;\n remainingFrames -= framesToCopy;\n }\n return result;\n }\n}\nconst getBytesPerSample = (format) => {\n switch (format) {\n case 'u8':\n case 'u8-planar':\n return 1;\n case 's16':\n case 's16-planar':\n return 2;\n case 's32':\n case 
's32-planar':\n return 4;\n case 'f32':\n case 'f32-planar':\n return 4;\n default:\n throw new Error('Unknown AudioSampleFormat');\n }\n};\nconst formatIsPlanar = (format) => {\n switch (format) {\n case 'u8-planar':\n case 's16-planar':\n case 's32-planar':\n case 'f32-planar':\n return true;\n default:\n return false;\n }\n};\nconst getReadFunction = (format) => {\n switch (format) {\n case 'u8':\n case 'u8-planar':\n return (view, offset) => (view.getUint8(offset) - 128) / 128;\n case 's16':\n case 's16-planar':\n return (view, offset) => view.getInt16(offset, true) / 32768;\n case 's32':\n case 's32-planar':\n return (view, offset) => view.getInt32(offset, true) / 2147483648;\n case 'f32':\n case 'f32-planar':\n return (view, offset) => view.getFloat32(offset, true);\n }\n};\nconst getWriteFunction = (format) => {\n switch (format) {\n case 'u8':\n case 'u8-planar':\n return (view, offset, value) => view.setUint8(offset, clamp((value + 1) * 127.5, 0, 255));\n case 's16':\n case 's16-planar':\n return (view, offset, value) => view.setInt16(offset, clamp(Math.round(value * 32767), -32768, 32767), true);\n case 's32':\n case 's32-planar':\n return (view, offset, value) => view.setInt32(offset, clamp(Math.round(value * 2147483647), -2147483648, 2147483647), true);\n case 'f32':\n case 'f32-planar':\n return (view, offset, value) => view.setFloat32(offset, value, true);\n }\n};\nconst isAudioData = (x) => {\n return typeof AudioData !== 'undefined' && x instanceof AudioData;\n};\n/**\n * WebKit has a bug where calling AudioData.copyTo with a format different from the source format\n * crashes the tab when there are more than 2 channels. 
This function works around that by always\n * copying with the source format and then manually converting to the destination format.\n *\n * See https://bugs.webkit.org/show_bug.cgi?id=302521.\n */\nconst doAudioDataCopyToWebKitWorkaround = (audioData, destView, srcFormat, destFormat, numChannels, planeIndex, frameOffset, copyFrameCount) => {\n const readFn = getReadFunction(srcFormat);\n const writeFn = getWriteFunction(destFormat);\n const srcBytesPerSample = getBytesPerSample(srcFormat);\n const destBytesPerSample = getBytesPerSample(destFormat);\n const srcIsPlanar = formatIsPlanar(srcFormat);\n const destIsPlanar = formatIsPlanar(destFormat);\n if (destIsPlanar) {\n if (srcIsPlanar) {\n // src planar -> dest planar: copy single plane and convert\n const data = new ArrayBuffer(copyFrameCount * srcBytesPerSample);\n const dataView = toDataView(data);\n audioData.copyTo(data, {\n planeIndex,\n frameOffset,\n frameCount: copyFrameCount,\n format: srcFormat,\n });\n for (let i = 0; i < copyFrameCount; i++) {\n const srcOffset = i * srcBytesPerSample;\n const destOffset = i * destBytesPerSample;\n const sample = readFn(dataView, srcOffset);\n writeFn(destView, destOffset, sample);\n }\n }\n else {\n // src interleaved -> dest planar: copy all interleaved data, extract one channel\n const data = new ArrayBuffer(copyFrameCount * numChannels * srcBytesPerSample);\n const dataView = toDataView(data);\n audioData.copyTo(data, {\n planeIndex: 0,\n frameOffset,\n frameCount: copyFrameCount,\n format: srcFormat,\n });\n for (let i = 0; i < copyFrameCount; i++) {\n const srcOffset = (i * numChannels + planeIndex) * srcBytesPerSample;\n const destOffset = i * destBytesPerSample;\n const sample = readFn(dataView, srcOffset);\n writeFn(destView, destOffset, sample);\n }\n }\n }\n else {\n if (srcIsPlanar) {\n // src planar -> dest interleaved: copy each plane and interleave\n const planeSize = copyFrameCount * srcBytesPerSample;\n const data = new ArrayBuffer(planeSize);\n 
const dataView = toDataView(data);\n for (let ch = 0; ch < numChannels; ch++) {\n audioData.copyTo(data, {\n planeIndex: ch,\n frameOffset,\n frameCount: copyFrameCount,\n format: srcFormat,\n });\n for (let i = 0; i < copyFrameCount; i++) {\n const srcOffset = i * srcBytesPerSample;\n const destOffset = (i * numChannels + ch) * destBytesPerSample;\n const sample = readFn(dataView, srcOffset);\n writeFn(destView, destOffset, sample);\n }\n }\n }\n else {\n // src interleaved -> dest interleaved: copy all and convert\n const data = new ArrayBuffer(copyFrameCount * numChannels * srcBytesPerSample);\n const dataView = toDataView(data);\n audioData.copyTo(data, {\n planeIndex: 0,\n frameOffset,\n frameCount: copyFrameCount,\n format: srcFormat,\n });\n for (let i = 0; i < copyFrameCount; i++) {\n for (let ch = 0; ch < numChannels; ch++) {\n const idx = i * numChannels + ch;\n const srcOffset = idx * srcBytesPerSample;\n const destOffset = idx * destBytesPerSample;\n const sample = readFn(dataView, srcOffset);\n writeFn(destView, destOffset, sample);\n }\n }\n }\n }\n};\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { assert, binarySearchLessOrEqual, closedIntervalsOverlap, isNumber, isWebKit, mergeRequestInit, promiseWithResolvers, retriedFetch, toDataView, toUint8Array, } from './misc.js';\nimport * as nodeAlias from './node.js';\nimport { InputDisposedError } from './input.js';\nconst node = typeof nodeAlias !== 'undefined'\n ? 
nodeAlias // Aliasing it prevents some bundler warnings\n : undefined;\n/**\n * The source base class, representing a resource from which bytes can be read.\n * @group Input sources\n * @public\n */\nexport class Source {\n constructor() {\n /** @internal */\n this._disposed = false;\n /** @internal */\n this._sizePromise = null;\n /** Called each time data is retrieved from the source. Will be called with the retrieved range (end exclusive). */\n this.onread = null;\n }\n /**\n * Resolves with the total size of the file in bytes. This function is memoized, meaning only the first call\n * will retrieve the size.\n *\n * Returns null if the source is unsized.\n */\n async getSizeOrNull() {\n if (this._disposed) {\n throw new InputDisposedError();\n }\n return this._sizePromise ??= Promise.resolve(this._retrieveSize());\n }\n /**\n * Resolves with the total size of the file in bytes. This function is memoized, meaning only the first call\n * will retrieve the size.\n *\n * Throws an error if the source is unsized.\n */\n async getSize() {\n if (this._disposed) {\n throw new InputDisposedError();\n }\n const result = await this.getSizeOrNull();\n if (result === null) {\n throw new Error('Cannot determine the size of an unsized source.');\n }\n return result;\n }\n}\n/**\n * A source backed by an ArrayBuffer or ArrayBufferView, with the entire file held in memory.\n * @group Input sources\n * @public\n */\nexport class BufferSource extends Source {\n /**\n * Creates a new {@link BufferSource} backed by the specified `ArrayBuffer`, `SharedArrayBuffer`,\n * or `ArrayBufferView`.\n */\n constructor(buffer) {\n if (!(buffer instanceof ArrayBuffer)\n && !(typeof SharedArrayBuffer !== 'undefined' && buffer instanceof SharedArrayBuffer)\n && !ArrayBuffer.isView(buffer)) {\n throw new TypeError('buffer must be an ArrayBuffer, SharedArrayBuffer, or ArrayBufferView.');\n }\n super();\n /** @internal */\n this._onreadCalled = false;\n this._bytes = toUint8Array(buffer);\n 
this._view = toDataView(buffer);\n }\n /** @internal */\n _retrieveSize() {\n return this._bytes.byteLength;\n }\n /** @internal */\n _read() {\n if (!this._onreadCalled) {\n // We just say the first read retrives all bytes from the source (which, I mean, it does)\n this.onread?.(0, this._bytes.byteLength);\n this._onreadCalled = true;\n }\n return {\n bytes: this._bytes,\n view: this._view,\n offset: 0,\n };\n }\n /** @internal */\n _dispose() { }\n}\n/**\n * A source backed by a [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). Since a\n * [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) is also a `Blob`, this is the source to use when\n * reading files off the disk.\n * @group Input sources\n * @public\n */\nexport class BlobSource extends Source {\n /**\n * Creates a new {@link BlobSource} backed by the specified\n * [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob).\n */\n constructor(blob, options = {}) {\n if (!(blob instanceof Blob)) {\n throw new TypeError('blob must be a Blob.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.maxCacheSize !== undefined\n && (!isNumber(options.maxCacheSize) || options.maxCacheSize < 0)) {\n throw new TypeError('options.maxCacheSize, when provided, must be a non-negative number.');\n }\n super();\n /** @internal */\n this._readers = new WeakMap();\n this._blob = blob;\n this._orchestrator = new ReadOrchestrator({\n maxCacheSize: options.maxCacheSize ?? 
(8 * 2 ** 20 /* 8 MiB */),\n maxWorkerCount: 4,\n runWorker: this._runWorker.bind(this),\n prefetchProfile: PREFETCH_PROFILES.fileSystem,\n });\n }\n /** @internal */\n _retrieveSize() {\n const size = this._blob.size;\n this._orchestrator.fileSize = size;\n return size;\n }\n /** @internal */\n _read(start, end) {\n return this._orchestrator.read(start, end);\n }\n /** @internal */\n async _runWorker(worker) {\n let reader = this._readers.get(worker);\n if (reader === undefined) {\n // https://github.com/Vanilagy/mediabunny/issues/184\n // WebKit has critical bugs with blob.stream():\n // - WebKitBlobResource error 1 when streaming large files\n // - Memory buildup and reload loops on iOS (network process crashes)\n // - ReadableStream stalls under backpressure (especially video)\n // Affects Safari and all iOS browsers (Chrome, Firefox, etc.).\n // Use arrayBuffer() fallback for WebKit browsers.\n if ('stream' in this._blob && !isWebKit()) {\n // Get a reader of the blob starting at the required offset, and then keep it around\n const slice = this._blob.slice(worker.currentPos);\n reader = slice.stream().getReader();\n }\n else {\n // We'll need to use more primitive ways\n reader = null;\n }\n this._readers.set(worker, reader);\n }\n while (worker.currentPos < worker.targetPos && !worker.aborted) {\n if (reader) {\n const { done, value } = await reader.read();\n if (done) {\n this._orchestrator.forgetWorker(worker);\n throw new Error('Blob reader stopped unexpectedly before all requested data was read.');\n }\n if (worker.aborted) {\n break;\n }\n this.onread?.(worker.currentPos, worker.currentPos + value.length);\n this._orchestrator.supplyWorkerData(worker, value);\n }\n else {\n const data = await this._blob.slice(worker.currentPos, worker.targetPos).arrayBuffer();\n if (worker.aborted) {\n break;\n }\n this.onread?.(worker.currentPos, worker.currentPos + data.byteLength);\n this._orchestrator.supplyWorkerData(worker, new Uint8Array(data));\n }\n }\n 
worker.running = false;\n if (worker.aborted) {\n // MDN: \"Calling this method signals a loss of interest in the stream by a consumer.\"\n await reader?.cancel();\n }\n }\n /** @internal */\n _dispose() {\n this._orchestrator.dispose();\n }\n}\nconst URL_SOURCE_MIN_LOAD_AMOUNT = 0.5 * 2 ** 20; // 0.5 MiB\nconst DEFAULT_RETRY_DELAY = ((previousAttempts, error, src) => {\n // Check if this could be a CORS error. If so, we cannot recover from it and\n // should not attempt to retry.\n // CORS errors are intentionally not opaque, so we need to rely on heuristics.\n const couldBeCorsError = error instanceof Error && (error.message.includes('Failed to fetch') // Chrome\n || error.message.includes('Load failed') // Safari\n || error.message.includes('NetworkError when attempting to fetch resource') // Firefox\n );\n if (couldBeCorsError) {\n let originOfSrc = null;\n // Checking if the origin is different, because only then a CORS error could originate\n try {\n if (typeof window !== 'undefined' && typeof window.location !== 'undefined') {\n originOfSrc = new URL(src instanceof Request ? src.url : src, window.location.href).origin;\n }\n }\n catch {\n // URL parse failed\n }\n // If user is offline, it is probably not a CORS error.\n const isOnline = typeof navigator !== 'undefined' && typeof navigator.onLine === 'boolean' ? navigator.onLine : true;\n if (isOnline && originOfSrc !== null && originOfSrc !== window.location.origin) {\n console.warn(`Request will not be retried because a CORS error was suspected due to different origins. You can`\n + ` modify this behavior by providing your own function for the 'getRetryDelay' option.`);\n return null;\n }\n }\n return Math.min(2 ** (previousAttempts - 2), 16);\n});\n/**\n * A source backed by a URL. This is useful for reading data from the network. 
Requests will be made using an optimized\n * reading and prefetching pattern to minimize request count and latency.\n * @group Input sources\n * @public\n */\nexport class UrlSource extends Source {\n /**\n * Creates a new {@link UrlSource} backed by the resource at the specified URL.\n *\n * When passing a `Request` instance, note that the `signal` and `headers.Range` options will be overridden by\n * Mediabunny. If you want to cancel ongoing requests, use {@link Input.dispose}.\n */\n constructor(url, options = {}) {\n if (typeof url !== 'string'\n && !(url instanceof URL)\n && !(typeof Request !== 'undefined' && url instanceof Request)) {\n throw new TypeError('url must be a string, URL or Request.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.requestInit !== undefined && (!options.requestInit || typeof options.requestInit !== 'object')) {\n throw new TypeError('options.requestInit, when provided, must be an object.');\n }\n if (options.getRetryDelay !== undefined && typeof options.getRetryDelay !== 'function') {\n throw new TypeError('options.getRetryDelay, when provided, must be a function.');\n }\n if (options.maxCacheSize !== undefined\n && (!isNumber(options.maxCacheSize) || options.maxCacheSize < 0)) {\n throw new TypeError('options.maxCacheSize, when provided, must be a non-negative number.');\n }\n if (options.parallelism !== undefined && (!Number.isInteger(options.parallelism) || options.parallelism < 1)) {\n throw new TypeError('options.parallelism, when provided, must be a positive number.');\n }\n if (options.fetchFn !== undefined && typeof options.fetchFn !== 'function') {\n throw new TypeError('options.fetchFn, when provided, must be a function.');\n // Won't bother validating this function beyond this\n }\n super();\n /** @internal */\n this._existingResponses = new WeakMap();\n this._url = url;\n this._options = options;\n this._getRetryDelay = options.getRetryDelay 
?? DEFAULT_RETRY_DELAY;\n // Most files in the real-world have a single sequential access pattern, but having two in parallel can\n // also happen\n const DEFAULT_PARALLELISM = 2;\n this._orchestrator = new ReadOrchestrator({\n maxCacheSize: options.maxCacheSize ?? (64 * 2 ** 20 /* 64 MiB */),\n maxWorkerCount: options.parallelism ?? DEFAULT_PARALLELISM,\n runWorker: this._runWorker.bind(this),\n prefetchProfile: PREFETCH_PROFILES.network,\n });\n }\n /** @internal */\n async _retrieveSize() {\n // Retrieving the resource size for UrlSource is optimized: Almost always (= always), the first bytes we have to\n // read are the start of the file. This means it's smart to combine size fetching with fetching the start of the\n // file. We additionally use this step to probe if the server supports range requests, killing three birds with\n // one stone.\n const abortController = new AbortController();\n const response = await retriedFetch(this._options.fetchFn ?? fetch, this._url, mergeRequestInit(this._options.requestInit ?? 
{}, {\n headers: {\n // We could also send a non-range request to request the same bytes (all of them), but doing it like\n // this is an easy way to check if the server supports range requests in the first place\n Range: 'bytes=0-',\n },\n signal: abortController.signal,\n }), this._getRetryDelay, () => this._disposed);\n if (!response.ok) {\n // eslint-disable-next-line @typescript-eslint/no-base-to-string\n throw new Error(`Error fetching ${String(this._url)}: ${response.status} ${response.statusText}`);\n }\n let worker;\n let fileSize;\n if (response.status === 206) {\n fileSize = this._getTotalLengthFromRangeResponse(response);\n worker = this._orchestrator.createWorker(0, Math.min(fileSize, URL_SOURCE_MIN_LOAD_AMOUNT));\n }\n else {\n // Server probably returned a 200.\n const contentLength = response.headers.get('Content-Length');\n if (contentLength) {\n fileSize = Number(contentLength);\n worker = this._orchestrator.createWorker(0, fileSize);\n this._orchestrator.options.maxCacheSize = Infinity; // 🤷\n console.warn('HTTP server did not respond with 206 Partial Content, meaning the entire remote resource now has'\n + ' to be downloaded. 
For efficient media file streaming across a network, please make sure your'\n + ' server supports range requests.');\n }\n else {\n throw new Error(`HTTP response (status ${response.status}) must surface Content-Length header.`);\n }\n }\n this._orchestrator.fileSize = fileSize;\n this._existingResponses.set(worker, { response, abortController });\n this._orchestrator.runWorker(worker);\n return fileSize;\n }\n /** @internal */\n _read(start, end) {\n return this._orchestrator.read(start, end);\n }\n /** @internal */\n async _runWorker(worker) {\n // The outer loop is for resuming a request if it dies mid-response\n while (true) {\n const existing = this._existingResponses.get(worker);\n this._existingResponses.delete(worker);\n let abortController = existing?.abortController;\n let response = existing?.response;\n if (!abortController) {\n abortController = new AbortController();\n response = await retriedFetch(this._options.fetchFn ?? fetch, this._url, mergeRequestInit(this._options.requestInit ?? {}, {\n headers: {\n Range: `bytes=${worker.currentPos}-`,\n },\n signal: abortController.signal,\n }), this._getRetryDelay, () => this._disposed);\n }\n assert(response);\n if (!response.ok) {\n // eslint-disable-next-line @typescript-eslint/no-base-to-string\n throw new Error(`Error fetching ${String(this._url)}: ${response.status} ${response.statusText}`);\n }\n if (worker.currentPos > 0 && response.status !== 206) {\n throw new Error('HTTP server did not respond with 206 Partial Content to a range request. To enable efficient media'\n + ' file streaming across a network, please make sure your server supports range requests.');\n }\n if (!response.body) {\n throw new Error('Missing HTTP response body stream. 
The used fetch function must provide the response body as a'\n + ' ReadableStream.');\n }\n const reader = response.body.getReader();\n while (true) {\n if (worker.currentPos >= worker.targetPos || worker.aborted) {\n abortController.abort();\n worker.running = false;\n return;\n }\n let readResult;\n try {\n readResult = await reader.read();\n }\n catch (error) {\n if (this._disposed) {\n // No need to try to retry\n throw error;\n }\n const retryDelayInSeconds = this._getRetryDelay(1, error, this._url);\n if (retryDelayInSeconds !== null) {\n console.error('Error while reading response stream. Attempting to resume.', error);\n await new Promise(resolve => setTimeout(resolve, 1000 * retryDelayInSeconds));\n break;\n }\n else {\n throw error;\n }\n }\n if (worker.aborted) {\n continue; // Cleanup happens in next iteration\n }\n const { done, value } = readResult;\n if (done) {\n if (worker.currentPos >= worker.targetPos) {\n // All data was delivered, we're good\n this._orchestrator.forgetWorker(worker);\n worker.running = false;\n return;\n }\n // The response stopped early, before the target. This can happen if server decides to cap range\n // requests arbitrarily, even if the request had an uncapped end. In this case, let's fetch the rest\n // of the data using a new request.\n break;\n }\n this.onread?.(worker.currentPos, worker.currentPos + value.length);\n this._orchestrator.supplyWorkerData(worker, value);\n }\n }\n // The previous UrlSource had logic for circumventing https://issues.chromium.org/issues/436025873; I haven't\n // been able to observe this bug with the new UrlSource (maybe because we're using response streaming), so the\n // logic for that has vanished for now. 
Leaving a comment here if this becomes relevant again.\n }\n /** @internal */\n _getTotalLengthFromRangeResponse(response) {\n const contentRange = response.headers.get('Content-Range');\n if (contentRange) {\n const match = /\\/(\\d+)/.exec(contentRange);\n if (match) {\n return Number(match[1]);\n }\n }\n const contentLength = response.headers.get('Content-Length');\n if (contentLength) {\n return Number(contentLength);\n }\n else {\n throw new Error('Partial HTTP response (status 206) must surface either Content-Range or'\n + ' Content-Length header.');\n }\n }\n /** @internal */\n _dispose() {\n this._orchestrator.dispose();\n }\n}\n/**\n * A source backed by a path to a file. Intended for server-side usage in Node, Bun, or Deno.\n *\n * Make sure to call `.dispose()` on the corresponding {@link Input} when done to explicitly free the internal file\n * handle acquired by this source.\n * @group Input sources\n * @public\n */\nexport class FilePathSource extends Source {\n /** Creates a new {@link FilePathSource} backed by the file at the specified file path. 
*/\n constructor(filePath, options = {}) {\n if (typeof filePath !== 'string') {\n throw new TypeError('filePath must be a string.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.maxCacheSize !== undefined\n && (!isNumber(options.maxCacheSize) || options.maxCacheSize < 0)) {\n throw new TypeError('options.maxCacheSize, when provided, must be a non-negative number.');\n }\n super();\n /** @internal */\n this._fileHandle = null;\n // Let's back this source with a StreamSource, makes the implementation very simple\n this._streamSource = new StreamSource({\n getSize: async () => {\n this._fileHandle = await node.fs.open(filePath, 'r');\n const stats = await this._fileHandle.stat();\n return stats.size;\n },\n read: async (start, end) => {\n assert(this._fileHandle);\n const buffer = new Uint8Array(end - start);\n await this._fileHandle.read(buffer, 0, end - start, start);\n return buffer;\n },\n maxCacheSize: options.maxCacheSize,\n prefetchProfile: 'fileSystem',\n });\n }\n /** @internal */\n _read(start, end) {\n return this._streamSource._read(start, end);\n }\n /** @internal */\n _retrieveSize() {\n return this._streamSource._retrieveSize();\n }\n /** @internal */\n _dispose() {\n this._streamSource._dispose();\n void this._fileHandle?.close();\n this._fileHandle = null;\n }\n}\n/**\n * A general-purpose, callback-driven source that can get its data from anywhere.\n * @group Input sources\n * @public\n */\nexport class StreamSource extends Source {\n /** Creates a new {@link StreamSource} whose behavior is specified by `options`. 
*/\n constructor(options) {\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (typeof options.getSize !== 'function') {\n throw new TypeError('options.getSize must be a function.');\n }\n if (typeof options.read !== 'function') {\n throw new TypeError('options.read must be a function.');\n }\n if (options.dispose !== undefined && typeof options.dispose !== 'function') {\n throw new TypeError('options.dispose, when provided, must be a function.');\n }\n if (options.maxCacheSize !== undefined\n && (!isNumber(options.maxCacheSize) || options.maxCacheSize < 0)) {\n throw new TypeError('options.maxCacheSize, when provided, must be a non-negative number.');\n }\n if (options.prefetchProfile && !['none', 'fileSystem', 'network'].includes(options.prefetchProfile)) {\n throw new TypeError('options.prefetchProfile, when provided, must be one of \\'none\\', \\'fileSystem\\' or \\'network\\'.');\n }\n super();\n this._options = options;\n this._orchestrator = new ReadOrchestrator({\n maxCacheSize: options.maxCacheSize ?? (8 * 2 ** 20 /* 8 MiB */),\n maxWorkerCount: 2, // Fixed for now, *should* be fine\n prefetchProfile: PREFETCH_PROFILES[options.prefetchProfile ?? 
'none'],\n runWorker: this._runWorker.bind(this),\n });\n }\n /** @internal */\n _retrieveSize() {\n const result = this._options.getSize();\n if (result instanceof Promise) {\n return result.then((size) => {\n if (!Number.isInteger(size) || size < 0) {\n throw new TypeError('options.getSize must return or resolve to a non-negative integer.');\n }\n this._orchestrator.fileSize = size;\n return size;\n });\n }\n else {\n if (!Number.isInteger(result) || result < 0) {\n throw new TypeError('options.getSize must return or resolve to a non-negative integer.');\n }\n this._orchestrator.fileSize = result;\n return result;\n }\n }\n /** @internal */\n _read(start, end) {\n return this._orchestrator.read(start, end);\n }\n /** @internal */\n async _runWorker(worker) {\n while (worker.currentPos < worker.targetPos && !worker.aborted) {\n const originalCurrentPos = worker.currentPos;\n const originalTargetPos = worker.targetPos;\n let data = this._options.read(worker.currentPos, originalTargetPos);\n if (data instanceof Promise)\n data = await data;\n if (worker.aborted) {\n break;\n }\n if (data instanceof Uint8Array) {\n data = toUint8Array(data); // Normalize things like Node.js Buffer to Uint8Array\n if (data.length !== originalTargetPos - worker.currentPos) {\n // Yes, we're that strict\n throw new Error(`options.read returned a Uint8Array with unexpected length: Requested ${originalTargetPos - worker.currentPos} bytes, but got ${data.length}.`);\n }\n this.onread?.(worker.currentPos, worker.currentPos + data.length);\n this._orchestrator.supplyWorkerData(worker, data);\n }\n else if (data instanceof ReadableStream) {\n const reader = data.getReader();\n while (worker.currentPos < originalTargetPos && !worker.aborted) {\n const { done, value } = await reader.read();\n if (done) {\n if (worker.currentPos < originalTargetPos) {\n // Yes, we're *that* strict\n throw new Error(`ReadableStream returned by options.read ended before supplying enough data.`\n + ` Requested 
${originalTargetPos - originalCurrentPos} bytes, but got ${worker.currentPos - originalCurrentPos}`);\n }\n break;\n }\n if (!(value instanceof Uint8Array)) {\n throw new TypeError('ReadableStream returned by options.read must yield Uint8Array chunks.');\n }\n if (worker.aborted) {\n break;\n }\n const data = toUint8Array(value); // Normalize things like Node.js Buffer to Uint8Array\n this.onread?.(worker.currentPos, worker.currentPos + data.length);\n this._orchestrator.supplyWorkerData(worker, data);\n }\n }\n else {\n throw new TypeError('options.read must return or resolve to a Uint8Array or a ReadableStream.');\n }\n }\n worker.running = false;\n }\n /** @internal */\n _dispose() {\n this._orchestrator.dispose();\n this._options.dispose?.();\n }\n}\n/**\n * A source backed by a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) of\n * `Uint8Array`, representing an append-only byte stream of unknown length. This is the source to use for incrementally\n * streaming in input files that are still being constructed and whose size we don't yet know, like for example the\n * output chunks of [MediaRecorder](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder).\n *\n * This source is *unsized*, meaning calls to `.getSize()` will throw and readers are more limited due to the\n * lack of random file access. You should only use this source with sequential access patterns, such as reading all\n * packets from start to end. This source does not work well with random access patterns unless you increase its\n * max cache size.\n *\n * @group Input sources\n * @public\n */\nexport class ReadableStreamSource extends Source {\n /** Creates a new {@link ReadableStreamSource} backed by the specified `ReadableStream<Uint8Array>`. 
*/\n constructor(stream, options = {}) {\n if (!(stream instanceof ReadableStream)) {\n throw new TypeError('stream must be a ReadableStream.');\n }\n if (!options || typeof options !== 'object') {\n throw new TypeError('options must be an object.');\n }\n if (options.maxCacheSize !== undefined\n && (!isNumber(options.maxCacheSize) || options.maxCacheSize < 0)) {\n throw new TypeError('options.maxCacheSize, when provided, must be a non-negative number.');\n }\n super();\n /** @internal */\n this._reader = null;\n /** @internal */\n this._cache = [];\n /** @internal */\n this._pendingSlices = [];\n /** @internal */\n this._currentIndex = 0;\n /** @internal */\n this._targetIndex = 0;\n /** @internal */\n this._maxRequestedIndex = 0;\n /** @internal */\n this._endIndex = null;\n /** @internal */\n this._pulling = false;\n this._stream = stream;\n this._maxCacheSize = options.maxCacheSize ?? (16 * 2 ** 20 /* 16 MiB */);\n }\n /** @internal */\n _retrieveSize() {\n return this._endIndex; // Starts out as null, meaning this source is unsized\n }\n /** @internal */\n _read(start, end) {\n if (this._endIndex !== null && end > this._endIndex) {\n return null;\n }\n this._maxRequestedIndex = Math.max(this._maxRequestedIndex, end);\n const cacheStartIndex = binarySearchLessOrEqual(this._cache, start, x => x.start);\n const cacheStartEntry = cacheStartIndex !== -1 ? 
this._cache[cacheStartIndex] : null;\n if (cacheStartEntry && cacheStartEntry.start <= start && end <= cacheStartEntry.end) {\n // The request can be satisfied with a single cache entry\n return {\n bytes: cacheStartEntry.bytes,\n view: cacheStartEntry.view,\n offset: cacheStartEntry.start,\n };\n }\n let lastEnd = start;\n const bytes = new Uint8Array(end - start);\n if (cacheStartIndex !== -1) {\n // Walk over the cache to see if we can satisfy the request using multiple cache entries\n for (let i = cacheStartIndex; i < this._cache.length; i++) {\n const cacheEntry = this._cache[i];\n if (cacheEntry.start >= end) {\n break;\n }\n const cappedStart = Math.max(start, cacheEntry.start);\n if (cappedStart > lastEnd) {\n // We're too far behind\n this._throwDueToCacheMiss();\n }\n const cappedEnd = Math.min(end, cacheEntry.end);\n if (cappedStart < cappedEnd) {\n bytes.set(cacheEntry.bytes.subarray(cappedStart - cacheEntry.start, cappedEnd - cacheEntry.start), cappedStart - start);\n lastEnd = cappedEnd;\n }\n }\n }\n if (lastEnd === end) {\n return {\n bytes,\n view: toDataView(bytes),\n offset: start,\n };\n }\n // We need to pull more data\n if (this._currentIndex > lastEnd) {\n // We're too far behind\n this._throwDueToCacheMiss();\n }\n const { promise, resolve, reject } = promiseWithResolvers();\n this._pendingSlices.push({\n start,\n end,\n bytes,\n resolve,\n reject,\n });\n this._targetIndex = Math.max(this._targetIndex, end);\n // Start pulling from the stream if we're not already doing it\n if (!this._pulling) {\n this._pulling = true;\n void this._pull()\n .catch((error) => {\n this._pulling = false;\n if (this._pendingSlices.length > 0) {\n this._pendingSlices.forEach(x => x.reject(error)); // Make sure to propagate any errors\n this._pendingSlices.length = 0;\n }\n else {\n throw error; // So it doesn't get swallowed\n }\n });\n }\n return promise;\n }\n /** @internal */\n _throwDueToCacheMiss() {\n throw new Error('Read is before the cached region. 
With ReadableStreamSource, you must access the data more'\n + ' sequentially or increase the size of its cache.');\n }\n /** @internal */\n async _pull() {\n this._reader ??= this._stream.getReader();\n // This is the loop that keeps pulling data from the stream until a target index is reached, filling requests\n // in the process\n while (this._currentIndex < this._targetIndex && !this._disposed) {\n const { done, value } = await this._reader.read();\n if (done) {\n for (const pendingSlice of this._pendingSlices) {\n pendingSlice.resolve(null);\n }\n this._pendingSlices.length = 0;\n this._endIndex = this._currentIndex; // We know how long the file is now!\n break;\n }\n const startIndex = this._currentIndex;\n const endIndex = this._currentIndex + value.byteLength;\n // Fill the pending slices with the data\n for (let i = 0; i < this._pendingSlices.length; i++) {\n const pendingSlice = this._pendingSlices[i];\n const cappedStart = Math.max(startIndex, pendingSlice.start);\n const cappedEnd = Math.min(endIndex, pendingSlice.end);\n if (cappedStart < cappedEnd) {\n pendingSlice.bytes.set(value.subarray(cappedStart - startIndex, cappedEnd - startIndex), cappedStart - pendingSlice.start);\n if (cappedEnd === pendingSlice.end) {\n // Pending slice fully filled\n pendingSlice.resolve({\n bytes: pendingSlice.bytes,\n view: toDataView(pendingSlice.bytes),\n offset: pendingSlice.start,\n });\n this._pendingSlices.splice(i, 1);\n i--;\n }\n }\n }\n this._cache.push({\n start: startIndex,\n end: endIndex,\n bytes: value,\n view: toDataView(value),\n age: 0, // Unused\n });\n // Do cache eviction, based on the distance from the last-requested index. 
It's important that we do it like\n // this and not based on where the reader is at, because if the reader is fast, we'll unnecessarily evict\n // data that we still might need.\n while (this._cache.length > 0) {\n const firstEntry = this._cache[0];\n const distance = this._maxRequestedIndex - firstEntry.end;\n if (distance <= this._maxCacheSize) {\n break;\n }\n this._cache.shift();\n }\n this._currentIndex += value.byteLength;\n }\n this._pulling = false;\n }\n /** @internal */\n _dispose() {\n this._pendingSlices.length = 0;\n this._cache.length = 0;\n }\n}\nconst PREFETCH_PROFILES = {\n none: (start, end) => ({ start, end }),\n fileSystem: (start, end) => {\n const padding = 2 ** 16;\n start = Math.floor((start - padding) / padding) * padding;\n end = Math.ceil((end + padding) / padding) * padding;\n return { start, end };\n },\n network: (start, end, workers) => {\n // Add a slight bit of start padding because backwards reading is painful\n const paddingStart = 2 ** 16;\n start = Math.max(0, Math.floor((start - paddingStart) / paddingStart) * paddingStart);\n // Remote resources have extreme latency (relatively speaking), so the benefit from intelligent\n // prefetching is great. The network prefetch strategy is as follows: When we notice\n // successive reads to a worker's read region, we prefetch more data at the end of that region,\n // growing exponentially (up to a cap). This performs well for real-world use cases: Either we read a\n // small part of the file once and then never need it again, in which case the requested about of data\n // is small. Or, we're repeatedly doing a sequential access pattern (common in media files), in which\n // case we can become more and more confident to prefetch more and more data.\n for (const worker of workers) {\n const maxExtensionAmount = 8 * 2 ** 20; // 8 MiB\n // When the read region cross the threshold point, we trigger a prefetch. 
This point is typically\n // in the middle of the worker's read region, or a fixed offset from the end if the region has grown\n // really large.\n const thresholdPoint = Math.max((worker.startPos + worker.targetPos) / 2, worker.targetPos - maxExtensionAmount);\n if (closedIntervalsOverlap(start, end, thresholdPoint, worker.targetPos)) {\n const size = worker.targetPos - worker.startPos;\n // If we extend by maxExtensionAmount\n const a = Math.ceil((size + 1) / maxExtensionAmount) * maxExtensionAmount;\n // If we extend to the next power of 2\n const b = 2 ** Math.ceil(Math.log2(size + 1));\n const extent = Math.min(b, a);\n end = Math.max(end, worker.startPos + extent);\n }\n }\n end = Math.max(end, start + URL_SOURCE_MIN_LOAD_AMOUNT);\n return {\n start,\n end,\n };\n },\n};\n/**\n * Godclass for orchestrating complex, cached read operations. The reading model is as follows: Any reading task is\n * delegated to a *worker*, which is a sequential reader positioned somewhere along the file. All workers run in\n * parallel and can be stopped and resumed in their forward movement. When read requests come in, this orchestrator will\n * first try to satisfy the request with only the cached data. 
If this isn't possible, workers are spun up for all\n * missing parts (or existing workers are repurposed), and these workers will then fill the holes in the data as they\n * march along the file.\n */\nclass ReadOrchestrator {\n constructor(options) {\n this.options = options;\n this.fileSize = null;\n this.nextAge = 0; // Used for LRU eviction of both cache entries and workers\n this.workers = [];\n this.cache = [];\n this.currentCacheSize = 0;\n this.disposed = false;\n }\n read(innerStart, innerEnd) {\n assert(this.fileSize !== null);\n const prefetchRange = this.options.prefetchProfile(innerStart, innerEnd, this.workers);\n const outerStart = Math.max(prefetchRange.start, 0);\n const outerEnd = Math.min(prefetchRange.end, this.fileSize);\n assert(outerStart <= innerStart && innerEnd <= outerEnd);\n let result = null;\n const innerCacheStartIndex = binarySearchLessOrEqual(this.cache, innerStart, x => x.start);\n const innerStartEntry = innerCacheStartIndex !== -1 ? this.cache[innerCacheStartIndex] : null;\n // See if the read request can be satisfied by a single cache entry\n if (innerStartEntry && innerStartEntry.start <= innerStart && innerEnd <= innerStartEntry.end) {\n innerStartEntry.age = this.nextAge++;\n result = {\n bytes: innerStartEntry.bytes,\n view: innerStartEntry.view,\n offset: innerStartEntry.start,\n };\n // Can't return yet though, still need to check if the prefetch range might lie outside the cached area\n }\n const outerCacheStartIndex = binarySearchLessOrEqual(this.cache, outerStart, x => x.start);\n const bytes = result ? 
null : new Uint8Array(innerEnd - innerStart);\n let contiguousBytesWriteEnd = 0; // Used to track if the cache is able to completely cover the bytes\n let lastEnd = outerStart;\n // The \"holes\" in the cache (the parts we need to load)\n const outerHoles = [];\n // Loop over the cache and build up the list of holes\n if (outerCacheStartIndex !== -1) {\n for (let i = outerCacheStartIndex; i < this.cache.length; i++) {\n const entry = this.cache[i];\n if (entry.start >= outerEnd) {\n break;\n }\n if (entry.end <= outerStart) {\n continue;\n }\n const cappedOuterStart = Math.max(outerStart, entry.start);\n const cappedOuterEnd = Math.min(outerEnd, entry.end);\n assert(cappedOuterStart <= cappedOuterEnd);\n if (lastEnd < cappedOuterStart) {\n outerHoles.push({ start: lastEnd, end: cappedOuterStart });\n }\n lastEnd = cappedOuterEnd;\n if (bytes) {\n const cappedInnerStart = Math.max(innerStart, entry.start);\n const cappedInnerEnd = Math.min(innerEnd, entry.end);\n if (cappedInnerStart < cappedInnerEnd) {\n const relativeOffset = cappedInnerStart - innerStart;\n // Fill the relevant section of the bytes with the cached data\n bytes.set(entry.bytes.subarray(cappedInnerStart - entry.start, cappedInnerEnd - entry.start), relativeOffset);\n if (relativeOffset === contiguousBytesWriteEnd) {\n contiguousBytesWriteEnd = cappedInnerEnd - innerStart;\n }\n }\n }\n entry.age = this.nextAge++;\n }\n if (lastEnd < outerEnd) {\n outerHoles.push({ start: lastEnd, end: outerEnd });\n }\n }\n else {\n outerHoles.push({ start: outerStart, end: outerEnd });\n }\n if (bytes && contiguousBytesWriteEnd >= bytes.length) {\n // Multiple cache entries were able to completely cover the requested bytes!\n result = {\n bytes,\n view: toDataView(bytes),\n offset: innerStart,\n };\n }\n if (outerHoles.length === 0) {\n assert(result);\n return result;\n }\n // We need to read more data, so now we're in async land\n const { promise, resolve, reject } = promiseWithResolvers();\n const innerHoles = 
[];\n for (const outerHole of outerHoles) {\n const cappedStart = Math.max(innerStart, outerHole.start);\n const cappedEnd = Math.min(innerEnd, outerHole.end);\n if (cappedStart === outerHole.start && cappedEnd === outerHole.end) {\n innerHoles.push(outerHole); // Can reuse without allocating a new object\n }\n else if (cappedStart < cappedEnd) {\n innerHoles.push({ start: cappedStart, end: cappedEnd });\n }\n }\n // Fire off workers to take care of patching the holes\n for (const outerHole of outerHoles) {\n const pendingSlice = bytes && {\n start: innerStart,\n bytes,\n holes: innerHoles,\n resolve,\n reject,\n };\n let workerFound = false;\n for (const worker of this.workers) {\n // A small tolerance in the case that the requested region is *just* after the target position of an\n // existing worker. In that case, it's probably more efficient to repurpose that worker than to spawn\n // another one so close to it\n const gapTolerance = 2 ** 17;\n // This check also implies worker.currentPos <= outerHole.start, a critical condition\n if (closedIntervalsOverlap(outerHole.start - gapTolerance, outerHole.start, worker.currentPos, worker.targetPos)) {\n worker.targetPos = Math.max(worker.targetPos, outerHole.end); // Update the worker's target position\n workerFound = true;\n if (pendingSlice && !worker.pendingSlices.includes(pendingSlice)) {\n worker.pendingSlices.push(pendingSlice);\n }\n if (!worker.running) {\n // Kick it off if it's idle\n this.runWorker(worker);\n }\n break;\n }\n }\n if (!workerFound) {\n // We need to spawn a new worker\n const newWorker = this.createWorker(outerHole.start, outerHole.end);\n if (pendingSlice) {\n newWorker.pendingSlices = [pendingSlice];\n }\n this.runWorker(newWorker);\n }\n }\n if (!result) {\n assert(bytes);\n result = promise.then(bytes => ({\n bytes,\n view: toDataView(bytes),\n offset: innerStart,\n }));\n }\n else {\n // The requested region was satisfied by the cache, but the entire prefetch region was not\n }\n return 
result;\n }\n createWorker(startPos, targetPos) {\n const worker = {\n startPos,\n currentPos: startPos,\n targetPos,\n running: false,\n // Due to async shenanigans, it can happen that workers are started after disposal. In this case, instead of\n // simply not creating the worker, we allow it to run but immediately label it as aborted, so it can then\n // shut itself down.\n aborted: this.disposed,\n pendingSlices: [],\n age: this.nextAge++,\n };\n this.workers.push(worker);\n // LRU eviction of the other workers\n while (this.workers.length > this.options.maxWorkerCount) {\n let oldestIndex = 0;\n let oldestWorker = this.workers[0];\n for (let i = 1; i < this.workers.length; i++) {\n const worker = this.workers[i];\n if (worker.age < oldestWorker.age) {\n oldestIndex = i;\n oldestWorker = worker;\n }\n }\n if (oldestWorker.running && oldestWorker.pendingSlices.length > 0) {\n break;\n }\n oldestWorker.aborted = true;\n this.workers.splice(oldestIndex, 1);\n }\n return worker;\n }\n runWorker(worker) {\n assert(!worker.running);\n assert(worker.currentPos < worker.targetPos);\n worker.running = true;\n worker.age = this.nextAge++;\n void this.options.runWorker(worker)\n .catch((error) => {\n worker.running = false;\n if (worker.pendingSlices.length > 0) {\n worker.pendingSlices.forEach(x => x.reject(error)); // Make sure to propagate any errors\n worker.pendingSlices.length = 0;\n }\n else {\n throw error; // So it doesn't get swallowed\n }\n });\n }\n /** Called by a worker when it has read some data. 
*/\n supplyWorkerData(worker, bytes) {\n assert(!worker.aborted);\n const start = worker.currentPos;\n const end = start + bytes.length;\n this.insertIntoCache({\n start,\n end,\n bytes,\n view: toDataView(bytes),\n age: this.nextAge++,\n });\n worker.currentPos += bytes.length;\n worker.targetPos = Math.max(worker.targetPos, worker.currentPos); // In case it overshoots\n // Now, let's see if we can use the read bytes to fill any pending slice\n for (let i = 0; i < worker.pendingSlices.length; i++) {\n const pendingSlice = worker.pendingSlices[i];\n const clampedStart = Math.max(start, pendingSlice.start);\n const clampedEnd = Math.min(end, pendingSlice.start + pendingSlice.bytes.length);\n if (clampedStart < clampedEnd) {\n pendingSlice.bytes.set(bytes.subarray(clampedStart - start, clampedEnd - start), clampedStart - pendingSlice.start);\n }\n for (let j = 0; j < pendingSlice.holes.length; j++) {\n // The hole is intentionally not modified here if the read section starts somewhere in the middle of\n // the hole. We don't need to do \"hole splitting\", since the workers are spawned *by* the holes,\n // meaning there's always a worker which will consume the hole left to right.\n const hole = pendingSlice.holes[j];\n if (start <= hole.start && end > hole.start) {\n hole.start = end;\n }\n if (hole.end <= hole.start) {\n pendingSlice.holes.splice(j, 1);\n j--;\n }\n }\n if (pendingSlice.holes.length === 0) {\n // The slice has been fulfilled, everything has been read. 
Let's resolve the promise\n pendingSlice.resolve(pendingSlice.bytes);\n worker.pendingSlices.splice(i, 1);\n i--;\n }\n }\n // Remove other idle workers if we \"ate\" into their territory\n for (let i = 0; i < this.workers.length; i++) {\n const otherWorker = this.workers[i];\n if (worker === otherWorker || otherWorker.running) {\n continue;\n }\n if (closedIntervalsOverlap(start, end, otherWorker.currentPos, otherWorker.targetPos)) {\n this.workers.splice(i, 1);\n i--;\n }\n }\n }\n forgetWorker(worker) {\n const index = this.workers.indexOf(worker);\n assert(index !== -1);\n this.workers.splice(index, 1);\n }\n insertIntoCache(entry) {\n if (this.options.maxCacheSize === 0) {\n return; // No caching\n }\n let insertionIndex = binarySearchLessOrEqual(this.cache, entry.start, x => x.start) + 1;\n if (insertionIndex > 0) {\n const previous = this.cache[insertionIndex - 1];\n if (previous.end >= entry.end) {\n // Previous entry swallows the one to be inserted; we don't need to do anything\n return;\n }\n if (previous.end > entry.start) {\n // Partial overlap with the previous entry, let's join\n const joined = new Uint8Array(entry.end - previous.start);\n joined.set(previous.bytes, 0);\n joined.set(entry.bytes, entry.start - previous.start);\n this.currentCacheSize += entry.end - previous.end;\n previous.bytes = joined;\n previous.view = toDataView(joined);\n previous.end = entry.end;\n // Do the rest of the logic with the previous entry instead\n insertionIndex--;\n entry = previous;\n }\n else {\n this.cache.splice(insertionIndex, 0, entry);\n this.currentCacheSize += entry.bytes.length;\n }\n }\n else {\n this.cache.splice(insertionIndex, 0, entry);\n this.currentCacheSize += entry.bytes.length;\n }\n for (let i = insertionIndex + 1; i < this.cache.length; i++) {\n const next = this.cache[i];\n if (entry.end <= next.start) {\n // Even if they touch, we don't wanna merge them, no need\n break;\n }\n if (entry.end >= next.end) {\n // The inserted entry completely 
swallows the next entry\n this.cache.splice(i, 1);\n this.currentCacheSize -= next.bytes.length;\n i--;\n continue;\n }\n // Partial overlap, let's join\n const joined = new Uint8Array(next.end - entry.start);\n joined.set(entry.bytes, 0);\n joined.set(next.bytes, next.start - entry.start);\n this.currentCacheSize -= entry.end - next.start; // Subtract the overlap\n entry.bytes = joined;\n entry.view = toDataView(joined);\n entry.end = next.end;\n this.cache.splice(i, 1);\n break; // After the join case, we're done: the next entry cannot possibly overlap with the inserted one.\n }\n // LRU eviction of cache entries\n while (this.currentCacheSize > this.options.maxCacheSize) {\n let oldestIndex = 0;\n let oldestEntry = this.cache[0];\n for (let i = 1; i < this.cache.length; i++) {\n const entry = this.cache[i];\n if (entry.age < oldestEntry.age) {\n oldestIndex = i;\n oldestEntry = entry;\n }\n }\n if (this.currentCacheSize - oldestEntry.bytes.length <= this.options.maxCacheSize) {\n // Don't evict if it would shrink the cache below the max size\n break;\n }\n this.cache.splice(oldestIndex, 1);\n this.currentCacheSize -= oldestEntry.bytes.length;\n }\n }\n dispose() {\n for (const worker of this.workers) {\n worker.aborted = true;\n }\n this.workers.length = 0;\n this.cache.length = 0;\n this.disposed = true;\n }\n}\n","/*!\n * Copyright (c) 2026-present, Vanilagy and contributors\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at https://mozilla.org/MPL/2.0/.\n */\nimport { Demuxer } from '../demuxer.js';\nimport { InputAudioTrack } from '../input-track.js';\nimport { DEFAULT_TRACK_DISPOSITION } from '../metadata.js';\nimport { assert, UNDETERMINED_LANGUAGE } from '../misc.js';\nimport { EncodedPacket, PLACEHOLDER_DATA } from '../packet.js';\nimport { readAscii, readBytes, readU16, readU32, readU64 } from '../reader.js';\nimport { ID3_V2_HEADER_SIZE, parseId3V2Tag, readId3V2Header } from '../id3.js';\nexport var WaveFormat;\n(function (WaveFormat) {\n WaveFormat[WaveFormat[\"PCM\"] = 1] = \"PCM\";\n WaveFormat[WaveFormat[\"IEEE_FLOAT\"] = 3] = \"IEEE_FLOAT\";\n WaveFormat[WaveFormat[\"ALAW\"] = 6] = \"ALAW\";\n WaveFormat[WaveFormat[\"MULAW\"] = 7] = \"MULAW\";\n WaveFormat[WaveFormat[\"EXTENSIBLE\"] = 65534] = \"EXTENSIBLE\";\n})(WaveFormat || (WaveFormat = {}));\nexport class WaveDemuxer extends Demuxer {\n constructor(input) {\n super(input);\n this.metadataPromise = null;\n this.dataStart = -1;\n this.dataSize = -1;\n this.audioInfo = null;\n this.tracks = [];\n this.lastKnownPacketIndex = 0;\n this.metadataTags = {};\n this.reader = input._reader;\n }\n async readMetadata() {\n return this.metadataPromise ??= (async () => {\n let slice = this.reader.requestSlice(0, 12);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n const riffType = readAscii(slice, 4);\n const littleEndian = riffType !== 'RIFX';\n const isRf64 = riffType === 'RF64';\n const outerChunkSize = readU32(slice, littleEndian);\n let totalFileSize = isRf64\n ? this.reader.fileSize\n : Math.min(outerChunkSize + 8, this.reader.fileSize ?? 
Infinity);\n const format = readAscii(slice, 4);\n if (format !== 'WAVE') {\n throw new Error('Invalid WAVE file - wrong format');\n }\n let chunksRead = 0;\n let dataChunkSize = null;\n let currentPos = slice.filePos;\n while (totalFileSize === null || currentPos < totalFileSize) {\n let slice = this.reader.requestSlice(currentPos, 8);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n break;\n const chunkId = readAscii(slice, 4);\n const chunkSize = readU32(slice, littleEndian);\n const startPos = slice.filePos;\n if (isRf64 && chunksRead === 0 && chunkId !== 'ds64') {\n throw new Error('Invalid RF64 file: First chunk must be \"ds64\".');\n }\n if (chunkId === 'fmt ') {\n await this.parseFmtChunk(startPos, chunkSize, littleEndian);\n }\n else if (chunkId === 'data') {\n dataChunkSize ??= chunkSize;\n this.dataStart = slice.filePos;\n this.dataSize = Math.min(dataChunkSize, (totalFileSize ?? Infinity) - this.dataStart);\n if (this.reader.fileSize === null) {\n break; // Stop once we hit the data chunk\n }\n }\n else if (chunkId === 'ds64') {\n // File and data chunk sizes are defined in here instead\n let ds64Slice = this.reader.requestSlice(startPos, chunkSize);\n if (ds64Slice instanceof Promise)\n ds64Slice = await ds64Slice;\n if (!ds64Slice)\n break;\n const riffChunkSize = readU64(ds64Slice, littleEndian);\n dataChunkSize = readU64(ds64Slice, littleEndian);\n totalFileSize = Math.min(riffChunkSize + 8, this.reader.fileSize ?? 
Infinity);\n }\n else if (chunkId === 'LIST') {\n await this.parseListChunk(startPos, chunkSize, littleEndian);\n }\n else if (chunkId === 'ID3 ' || chunkId === 'id3 ') {\n await this.parseId3Chunk(startPos, chunkSize);\n }\n currentPos = startPos + chunkSize + (chunkSize & 1); // Handle padding\n chunksRead++;\n }\n if (!this.audioInfo) {\n throw new Error('Invalid WAVE file - missing \"fmt \" chunk');\n }\n if (this.dataStart === -1) {\n throw new Error('Invalid WAVE file - missing \"data\" chunk');\n }\n const blockSize = this.audioInfo.blockSizeInBytes;\n this.dataSize = Math.floor(this.dataSize / blockSize) * blockSize;\n this.tracks.push(new InputAudioTrack(this.input, new WaveAudioTrackBacking(this)));\n })();\n }\n async parseFmtChunk(startPos, size, littleEndian) {\n let slice = this.reader.requestSlice(startPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return; // File too short\n let formatTag = readU16(slice, littleEndian);\n const numChannels = readU16(slice, littleEndian);\n const sampleRate = readU32(slice, littleEndian);\n slice.skip(4); // Bytes per second\n const blockAlign = readU16(slice, littleEndian);\n let bitsPerSample;\n if (size === 14) { // Plain WAVEFORMAT\n bitsPerSample = 8;\n }\n else {\n bitsPerSample = readU16(slice, littleEndian);\n }\n // Handle WAVEFORMATEXTENSIBLE\n if (size >= 18 && formatTag !== 0x0165) {\n const cbSize = readU16(slice, littleEndian);\n const remainingSize = size - 18;\n const extensionSize = Math.min(remainingSize, cbSize);\n if (extensionSize >= 22 && formatTag === WaveFormat.EXTENSIBLE) {\n // Parse WAVEFORMATEXTENSIBLE\n slice.skip(2 + 4);\n const subFormat = readBytes(slice, 16);\n // Get actual format from subFormat GUID\n formatTag = subFormat[0] | (subFormat[1] << 8);\n }\n }\n if (formatTag === WaveFormat.MULAW || formatTag === WaveFormat.ALAW) {\n bitsPerSample = 8;\n }\n this.audioInfo = {\n format: formatTag,\n numberOfChannels: numChannels,\n sampleRate,\n 
sampleSizeInBytes: Math.ceil(bitsPerSample / 8),\n blockSizeInBytes: blockAlign,\n };\n }\n async parseListChunk(startPos, size, littleEndian) {\n let slice = this.reader.requestSlice(startPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return; // File too short\n const infoType = readAscii(slice, 4);\n if (infoType !== 'INFO' && infoType !== 'INF0') { // exiftool.org claims INF0 can happen\n return; // Not an INFO chunk\n }\n let currentPos = slice.filePos;\n while (currentPos <= startPos + size - 8) {\n slice.filePos = currentPos;\n const chunkName = readAscii(slice, 4);\n const chunkSize = readU32(slice, littleEndian);\n const bytes = readBytes(slice, chunkSize);\n let stringLength = 0;\n for (let i = 0; i < bytes.length; i++) {\n if (bytes[i] === 0) {\n break;\n }\n stringLength++;\n }\n const value = String.fromCharCode(...bytes.subarray(0, stringLength));\n this.metadataTags.raw ??= {};\n this.metadataTags.raw[chunkName] = value;\n switch (chunkName) {\n case 'INAM':\n case 'TITL':\n {\n this.metadataTags.title ??= value;\n }\n ;\n break;\n case 'TIT3':\n {\n this.metadataTags.description ??= value;\n }\n ;\n break;\n case 'IART':\n {\n this.metadataTags.artist ??= value;\n }\n ;\n break;\n case 'IPRD':\n {\n this.metadataTags.album ??= value;\n }\n ;\n break;\n case 'IPRT':\n case 'ITRK':\n case 'TRCK':\n {\n const parts = value.split('/');\n const trackNum = Number.parseInt(parts[0], 10);\n const tracksTotal = parts[1] && Number.parseInt(parts[1], 10);\n if (Number.isInteger(trackNum) && trackNum > 0) {\n this.metadataTags.trackNumber ??= trackNum;\n }\n if (tracksTotal && Number.isInteger(tracksTotal) && tracksTotal > 0) {\n this.metadataTags.tracksTotal ??= tracksTotal;\n }\n }\n ;\n break;\n case 'ICRD':\n case 'IDIT':\n {\n const date = new Date(value);\n if (!Number.isNaN(date.getTime())) {\n this.metadataTags.date ??= date;\n }\n }\n ;\n break;\n case 'YEAR':\n {\n const year = Number.parseInt(value, 10);\n if 
(Number.isInteger(year) && year > 0) {\n this.metadataTags.date ??= new Date(year, 0, 1);\n }\n }\n ;\n break;\n case 'IGNR':\n case 'GENR':\n {\n this.metadataTags.genre ??= value;\n }\n ;\n break;\n case 'ICMT':\n case 'CMNT':\n case 'COMM':\n {\n this.metadataTags.comment ??= value;\n }\n ;\n break;\n }\n currentPos += 8 + chunkSize + (chunkSize & 1); // Handle padding\n }\n }\n async parseId3Chunk(startPos, size) {\n // Parse ID3 tag embedded in WAV file (non-default, but used a lot in practice anyway)\n let slice = this.reader.requestSlice(startPos, size);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice)\n return; // File too short\n const id3V2Header = readId3V2Header(slice);\n if (id3V2Header) {\n // Clamp to the available data in case the ID3 header claims more than the WAV chunk provides\n // https://github.com/Vanilagy/mediabunny/issues/300\n const availableSize = size - ID3_V2_HEADER_SIZE;\n id3V2Header.size = Math.min(id3V2Header.size, availableSize);\n if (id3V2Header.size > 0) {\n const contentSlice = slice.slice(startPos + ID3_V2_HEADER_SIZE, id3V2Header.size);\n parseId3V2Tag(contentSlice, id3V2Header, this.metadataTags);\n }\n }\n }\n getCodec() {\n assert(this.audioInfo);\n if (this.audioInfo.format === WaveFormat.MULAW) {\n return 'ulaw';\n }\n if (this.audioInfo.format === WaveFormat.ALAW) {\n return 'alaw';\n }\n if (this.audioInfo.format === WaveFormat.PCM) {\n // All formats are little-endian\n if (this.audioInfo.sampleSizeInBytes === 1) {\n return 'pcm-u8';\n }\n else if (this.audioInfo.sampleSizeInBytes === 2) {\n return 'pcm-s16';\n }\n else if (this.audioInfo.sampleSizeInBytes === 3) {\n return 'pcm-s24';\n }\n else if (this.audioInfo.sampleSizeInBytes === 4) {\n return 'pcm-s32';\n }\n }\n if (this.audioInfo.format === WaveFormat.IEEE_FLOAT) {\n if (this.audioInfo.sampleSizeInBytes === 4) {\n return 'pcm-f32';\n }\n }\n return null;\n }\n async getMimeType() {\n return 'audio/wav';\n }\n async computeDuration() {\n 
await this.readMetadata();\n const track = this.tracks[0];\n assert(track);\n return track.computeDuration();\n }\n async getTracks() {\n await this.readMetadata();\n return this.tracks;\n }\n async getMetadataTags() {\n await this.readMetadata();\n return this.metadataTags;\n }\n}\nconst PACKET_SIZE_IN_FRAMES = 2048;\nclass WaveAudioTrackBacking {\n constructor(demuxer) {\n this.demuxer = demuxer;\n }\n getId() {\n return 1;\n }\n getNumber() {\n return 1;\n }\n getCodec() {\n return this.demuxer.getCodec();\n }\n getInternalCodecId() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.format;\n }\n async getDecoderConfig() {\n const codec = this.demuxer.getCodec();\n if (!codec) {\n return null;\n }\n assert(this.demuxer.audioInfo);\n return {\n codec,\n numberOfChannels: this.demuxer.audioInfo.numberOfChannels,\n sampleRate: this.demuxer.audioInfo.sampleRate,\n };\n }\n async computeDuration() {\n const lastPacket = await this.getPacket(Infinity, { metadataOnly: true });\n return (lastPacket?.timestamp ?? 0) + (lastPacket?.duration ?? 
0);\n }\n getNumberOfChannels() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.numberOfChannels;\n }\n getSampleRate() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.sampleRate;\n }\n getTimeResolution() {\n assert(this.demuxer.audioInfo);\n return this.demuxer.audioInfo.sampleRate;\n }\n getName() {\n return null;\n }\n getLanguageCode() {\n return UNDETERMINED_LANGUAGE;\n }\n getDisposition() {\n return {\n ...DEFAULT_TRACK_DISPOSITION,\n };\n }\n async getFirstTimestamp() {\n return 0;\n }\n async getPacketAtIndex(packetIndex, options) {\n assert(packetIndex >= 0);\n assert(this.demuxer.audioInfo);\n const startOffset = packetIndex * PACKET_SIZE_IN_FRAMES * this.demuxer.audioInfo.blockSizeInBytes;\n if (startOffset >= this.demuxer.dataSize) {\n return null;\n }\n const sizeInBytes = Math.min(PACKET_SIZE_IN_FRAMES * this.demuxer.audioInfo.blockSizeInBytes, this.demuxer.dataSize - startOffset);\n if (this.demuxer.reader.fileSize === null) {\n // If the file size is unknown, we weren't able to cap the dataSize in the init logic and we instead have to\n // rely on the headers telling us how large the file is. 
But, these might be wrong, so let's check if the\n // requested slice actually exists.\n let slice = this.demuxer.reader.requestSlice(this.demuxer.dataStart + startOffset, sizeInBytes);\n if (slice instanceof Promise)\n slice = await slice;\n if (!slice) {\n return null;\n }\n }\n let data;\n if (options.metadataOnly) {\n data = PLACEHOLDER_DATA;\n }\n else {\n let slice = this.demuxer.reader.requestSlice(this.demuxer.dataStart + startOffset, sizeInBytes);\n if (slice instanceof Promise)\n slice = await slice;\n assert(slice);\n data = readBytes(slice, sizeInBytes);\n }\n const timestamp = packetIndex * PACKET_SIZE_IN_FRAMES / this.demuxer.audioInfo.sampleRate;\n const duration = sizeInBytes / this.demuxer.audioInfo.blockSizeInBytes / this.demuxer.audioInfo.sampleRate;\n this.demuxer.lastKnownPacketIndex = Math.max(packetIndex, this.demuxer.lastKnownPacketIndex);\n return new EncodedPacket(data, 'key', timestamp, duration, packetIndex, sizeInBytes);\n }\n getFirstPacket(options) {\n return this.getPacketAtIndex(0, options);\n }\n async getPacket(timestamp, options) {\n assert(this.demuxer.audioInfo);\n const packetIndex = Math.floor(Math.min(timestamp * this.demuxer.audioInfo.sampleRate / PACKET_SIZE_IN_FRAMES, (this.demuxer.dataSize - 1) / (PACKET_SIZE_IN_FRAMES * this.demuxer.audioInfo.blockSizeInBytes)));\n if (packetIndex < 0) {\n return null;\n }\n const packet = await this.getPacketAtIndex(packetIndex, options);\n if (packet) {\n return packet;\n }\n if (packetIndex === 0) {\n return null; // Empty data chunk\n }\n assert(this.demuxer.reader.fileSize === null);\n // The file is shorter than we thought, meaning the packet we were looking for doesn't exist. 
So, let's find\n // the last packet by doing a sequential scan, instead.\n let currentPacket = await this.getPacketAtIndex(this.demuxer.lastKnownPacketIndex, options);\n while (currentPacket) {\n const nextPacket = await this.getNextPacket(currentPacket, options);\n if (!nextPacket) {\n break;\n }\n currentPacket = nextPacket;\n }\n return currentPacket;\n }\n getNextPacket(packet, options) {\n assert(this.demuxer.audioInfo);\n const packetIndex = Math.round(packet.timestamp * this.demuxer.audioInfo.sampleRate / PACKET_SIZE_IN_FRAMES);\n return this.getPacketAtIndex(packetIndex + 1, options);\n }\n getKeyPacket(timestamp, options) {\n return this.getPacket(timestamp, options);\n }\n getNextKeyPacket(packet, options) {\n return this.getNextPacket(packet, options);\n }\n}\n","var __defProp = Object.defineProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, {\n get: all[name],\n enumerable: true,\n configurable: true,\n set: (newValue) => all[name] = () => newValue\n });\n};\n\n// src/_check-rsc.ts\nimport { createContext } from \"react\";\nif (typeof createContext !== \"function\") {\n const err = [\n 'Remotion requires React.createContext, but it is \"undefined\".',\n 'If you are in a React Server Component, turn it into a client component by adding \"use client\" at the top of the file.',\n \"\",\n \"Before:\",\n ' import {useCurrentFrame} from \"remotion\";',\n \"\",\n \"After:\",\n ' \"use client\";',\n ' import {useCurrentFrame} from \"remotion\";'\n ];\n throw new Error(err.join(`\n`));\n}\n\n// src/Clipper.tsx\nvar Clipper = () => {\n throw new Error(\"<Clipper> has been removed as of Remotion v4.0.228. The native clipping APIs were experimental and subject to removal at any time. 
We removed them because they were sparingly used and made rendering often slower rather than faster.\");\n};\n\n// src/enable-sequence-stack-traces.ts\nvar componentsToAddStacksTo = [];\nvar getComponentsToAddStacksTo = () => componentsToAddStacksTo;\nvar addSequenceStackTraces = (component) => {\n componentsToAddStacksTo.push(component);\n};\n\n// src/is-player.tsx\nimport { createContext as createContext2, useContext } from \"react\";\nimport { jsx } from \"react/jsx-runtime\";\nvar IsPlayerContext = createContext2(false);\nvar IsPlayerContextProvider = ({\n children\n}) => {\n return /* @__PURE__ */ jsx(IsPlayerContext.Provider, {\n value: true,\n children\n });\n};\nvar useIsPlayer = () => {\n return useContext(IsPlayerContext);\n};\n\n// src/truthy.ts\nfunction truthy(value) {\n return Boolean(value);\n}\n\n// src/version.ts\nvar VERSION = \"4.0.436\";\n\n// src/multiple-versions-warning.ts\nvar checkMultipleRemotionVersions = () => {\n if (typeof globalThis === \"undefined\") {\n return;\n }\n const set = () => {\n globalThis.remotion_imported = VERSION;\n if (typeof window !== \"undefined\") {\n window.remotion_imported = VERSION;\n }\n };\n const alreadyImported = globalThis.remotion_imported || typeof window !== \"undefined\" && window.remotion_imported;\n if (alreadyImported) {\n if (alreadyImported === VERSION) {\n return;\n }\n if (typeof alreadyImported === \"string\" && alreadyImported.includes(\"webcodecs\")) {\n set();\n return;\n }\n throw new TypeError(`\\uD83D\\uDEA8 Multiple versions of Remotion detected: ${[\n VERSION,\n typeof alreadyImported === \"string\" ? alreadyImported : \"an older version\"\n ].filter(truthy).join(\" and \")}. This will cause things to break in an unexpected way.\nCheck that all your Remotion packages are on the same version. If your dependencies depend on Remotion, make them peer dependencies. 
You can also run \\`npx remotion versions\\` from your terminal to see which versions are mismatching.`);\n }\n set();\n};\n\n// src/Null.tsx\nvar Null = () => {\n throw new Error(\"<Null> has been removed as of Remotion v4.0.228. The native clipping APIs were experimental and subject to removal at any time. We removed them because they were sparingly used and made rendering often slower rather than faster.\");\n};\n\n// src/Sequence.tsx\nimport {\n forwardRef as forwardRef2,\n useContext as useContext13,\n useEffect,\n useMemo as useMemo11,\n useState as useState4\n} from \"react\";\n\n// src/AbsoluteFill.tsx\nimport { forwardRef, useMemo } from \"react\";\nimport { jsx as jsx2 } from \"react/jsx-runtime\";\nvar hasTailwindClassName = ({\n className,\n classPrefix,\n type\n}) => {\n if (!className) {\n return false;\n }\n if (type === \"exact\") {\n const split = className.split(\" \");\n return classPrefix.some((token) => {\n return split.some((part) => {\n return part.trim() === token || part.trim().endsWith(`:${token}`) || part.trim().endsWith(`!${token}`);\n });\n });\n }\n return classPrefix.some((prefix) => {\n return className.startsWith(prefix) || className.includes(` ${prefix}`) || className.includes(`!${prefix}`) || className.includes(`:${prefix}`);\n });\n};\nvar AbsoluteFillRefForwarding = (props, ref) => {\n const { style, ...other } = props;\n const actualStyle = useMemo(() => {\n return {\n position: \"absolute\",\n top: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"top-\", \"inset-\"],\n type: \"prefix\"\n }) ? undefined : 0,\n left: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"left-\", \"inset-\"],\n type: \"prefix\"\n }) ? undefined : 0,\n right: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"right-\", \"inset-\"],\n type: \"prefix\"\n }) ? 
undefined : 0,\n bottom: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"bottom-\", \"inset-\"],\n type: \"prefix\"\n }) ? undefined : 0,\n width: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"w-\"],\n type: \"prefix\"\n }) ? undefined : \"100%\",\n height: hasTailwindClassName({\n className: other.className,\n classPrefix: [\"h-\"],\n type: \"prefix\"\n }) ? undefined : \"100%\",\n display: hasTailwindClassName({\n className: other.className,\n classPrefix: [\n \"block\",\n \"inline-block\",\n \"inline\",\n \"flex\",\n \"inline-flex\",\n \"flow-root\",\n \"grid\",\n \"inline-grid\",\n \"contents\",\n \"list-item\",\n \"hidden\"\n ],\n type: \"exact\"\n }) ? undefined : \"flex\",\n flexDirection: hasTailwindClassName({\n className: other.className,\n classPrefix: [\n \"flex-row\",\n \"flex-col\",\n \"flex-row-reverse\",\n \"flex-col-reverse\"\n ],\n type: \"exact\"\n }) ? undefined : \"column\",\n ...style\n };\n }, [other.className, style]);\n return /* @__PURE__ */ jsx2(\"div\", {\n ref,\n style: actualStyle,\n ...other\n });\n};\nvar AbsoluteFill = forwardRef(AbsoluteFillRefForwarding);\n\n// src/freeze.tsx\nimport { useContext as useContext11, useMemo as useMemo8 } from \"react\";\n\n// src/SequenceContext.tsx\nimport { createContext as createContext3 } from \"react\";\nvar SequenceContext = createContext3(null);\n\n// src/timeline-position-state.ts\nvar exports_timeline_position_state = {};\n__export(exports_timeline_position_state, {\n useTimelineSetFrame: () => useTimelineSetFrame,\n useTimelinePosition: () => useTimelinePosition,\n useTimelineContext: () => useTimelineContext,\n usePlayingState: () => usePlayingState,\n useAbsoluteTimelinePosition: () => useAbsoluteTimelinePosition,\n persistCurrentFrame: () => persistCurrentFrame,\n getInitialFrameState: () => getInitialFrameState,\n getFrameForComposition: () => getFrameForComposition\n});\nimport { useContext as useContext7, useMemo as useMemo6 } from 
\"react\";\n\n// src/TimelineContext.tsx\nimport {\n createContext as createContext6,\n useLayoutEffect,\n useMemo as useMemo2,\n useRef,\n useState as useState2\n} from \"react\";\n\n// src/random.ts\nfunction mulberry32(a) {\n let t = a + 1831565813;\n t = Math.imul(t ^ t >>> 15, t | 1);\n t ^= t + Math.imul(t ^ t >>> 7, t | 61);\n return ((t ^ t >>> 14) >>> 0) / 4294967296;\n}\nfunction hashCode(str) {\n let i = 0;\n let chr = 0;\n let hash = 0;\n for (i = 0;i < str.length; i++) {\n chr = str.charCodeAt(i);\n hash = (hash << 5) - hash + chr;\n hash |= 0;\n }\n return hash;\n}\nvar random = (seed, dummy) => {\n if (dummy !== undefined) {\n throw new TypeError(\"random() takes only one argument\");\n }\n if (seed === null) {\n return Math.random();\n }\n if (typeof seed === \"string\") {\n return mulberry32(hashCode(seed));\n }\n if (typeof seed === \"number\") {\n return mulberry32(seed * 10000000000);\n }\n throw new Error(\"random() argument must be a number or a string\");\n};\n\n// src/use-delay-render.tsx\nimport { createContext as createContext5, useCallback, useContext as useContext4 } from \"react\";\n\n// src/cancel-render.ts\nvar getErrorStackWithMessage = (error) => {\n const stack = error.stack ?? \"\";\n return stack.startsWith(\"Error:\") ? 
stack : `${error.message}\n${stack}`;\n};\nvar isErrorLike = (err) => {\n if (err instanceof Error) {\n return true;\n }\n if (err === null) {\n return false;\n }\n if (typeof err !== \"object\") {\n return false;\n }\n if (!(\"stack\" in err)) {\n return false;\n }\n if (typeof err.stack !== \"string\") {\n return false;\n }\n if (!(\"message\" in err)) {\n return false;\n }\n if (typeof err.message !== \"string\") {\n return false;\n }\n return true;\n};\nfunction cancelRenderInternal(scope, err) {\n let error;\n if (isErrorLike(err)) {\n error = err;\n if (!error.stack) {\n error.stack = new Error(error.message).stack;\n }\n } else if (typeof err === \"string\") {\n error = Error(err);\n } else {\n error = Error(\"Rendering was cancelled\");\n }\n if (scope) {\n scope.remotion_cancelledError = getErrorStackWithMessage(error);\n }\n throw error;\n}\nfunction cancelRender(err) {\n return cancelRenderInternal(typeof window !== \"undefined\" ? window : undefined, err);\n}\n\n// src/get-remotion-environment.ts\nfunction getNodeEnvString() {\n return [\"NOD\", \"E_EN\", \"V\"].join(\"\");\n}\nvar getEnvString = () => {\n return [\"e\", \"nv\"].join(\"\");\n};\nvar getRemotionEnvironment = () => {\n const isPlayer = typeof window !== \"undefined\" && window.remotion_isPlayer;\n const isRendering = typeof window !== \"undefined\" && typeof window.process !== \"undefined\" && typeof window.process.env !== \"undefined\" && (window.process[getEnvString()][getNodeEnvString()] === \"test\" || window.process[getEnvString()][getNodeEnvString()] === \"production\" && typeof window !== \"undefined\" && typeof window.remotion_puppeteerTimeout !== \"undefined\");\n const isStudio = typeof window !== \"undefined\" && window.remotion_isStudio;\n const isReadOnlyStudio = typeof window !== \"undefined\" && window.remotion_isReadOnlyStudio;\n return {\n isStudio,\n isRendering,\n isPlayer,\n isReadOnlyStudio,\n isClientSideRendering: false\n };\n};\n\n// src/log.ts\nvar logLevels = 
[\"trace\", \"verbose\", \"info\", \"warn\", \"error\"];\nvar getNumberForLogLevel = (level) => {\n return logLevels.indexOf(level);\n};\nvar isEqualOrBelowLogLevel = (currentLevel, level) => {\n return getNumberForLogLevel(currentLevel) <= getNumberForLogLevel(level);\n};\nvar transformArgs = ({\n args,\n logLevel,\n tag\n}) => {\n const arr = [...args];\n if (getRemotionEnvironment().isRendering && !getRemotionEnvironment().isClientSideRendering) {\n arr.unshift(Symbol.for(`__remotion_level_${logLevel}`));\n }\n if (tag && getRemotionEnvironment().isRendering && !getRemotionEnvironment().isClientSideRendering) {\n arr.unshift(Symbol.for(`__remotion_tag_${tag}`));\n }\n return arr;\n};\nvar verbose = (options, ...args) => {\n if (isEqualOrBelowLogLevel(options.logLevel, \"verbose\")) {\n return console.debug(...transformArgs({ args, logLevel: \"verbose\", tag: options.tag }));\n }\n};\nvar trace = (options, ...args) => {\n if (isEqualOrBelowLogLevel(options.logLevel, \"trace\")) {\n return console.debug(...transformArgs({ args, logLevel: \"trace\", tag: options.tag }));\n }\n};\nvar info = (options, ...args) => {\n if (isEqualOrBelowLogLevel(options.logLevel, \"info\")) {\n return console.log(...transformArgs({ args, logLevel: \"info\", tag: options.tag }));\n }\n};\nvar warn = (options, ...args) => {\n if (isEqualOrBelowLogLevel(options.logLevel, \"warn\")) {\n return console.warn(...transformArgs({ args, logLevel: \"warn\", tag: options.tag }));\n }\n};\nvar error = (options, ...args) => {\n return console.error(...transformArgs({ args, logLevel: \"error\", tag: options.tag }));\n};\nvar Log = {\n trace,\n verbose,\n info,\n warn,\n error\n};\n\n// src/delay-render.ts\nif (typeof window !== \"undefined\") {\n window.remotion_renderReady = false;\n if (!window.remotion_delayRenderTimeouts) {\n window.remotion_delayRenderTimeouts = {};\n }\n window.remotion_delayRenderHandles = [];\n}\nvar DELAY_RENDER_CALLSTACK_TOKEN = \"The delayRender was called:\";\nvar 
DELAY_RENDER_RETRIES_LEFT = \"Retries left: \";\nvar DELAY_RENDER_RETRY_TOKEN = \"- Rendering the frame will be retried.\";\nvar DELAY_RENDER_CLEAR_TOKEN = \"handle was cleared after\";\nvar defaultTimeout = 30000;\nvar delayRenderInternal = ({\n scope,\n environment,\n label,\n options\n}) => {\n if (typeof label !== \"string\" && label !== null) {\n throw new Error(\"The label parameter of delayRender() must be a string or undefined, got: \" + JSON.stringify(label));\n }\n const handle = Math.random();\n scope.remotion_delayRenderHandles.push(handle);\n const called = Error().stack?.replace(/^Error/g, \"\") ?? \"\";\n if (environment.isRendering) {\n const timeoutToUse = (options?.timeoutInMilliseconds ?? scope.remotion_puppeteerTimeout ?? defaultTimeout) - 2000;\n const retriesLeft = (options?.retries ?? 0) - (scope.remotion_attempt - 1);\n scope.remotion_delayRenderTimeouts[handle] = {\n label: label ?? null,\n startTime: Date.now(),\n timeout: setTimeout(() => {\n const message = [\n `A delayRender()`,\n label ? `\"${label}\"` : null,\n `was called but not cleared after ${timeoutToUse}ms. See https://remotion.dev/docs/timeout for help.`,\n retriesLeft > 0 ? DELAY_RENDER_RETRIES_LEFT + retriesLeft : null,\n retriesLeft > 0 ? DELAY_RENDER_RETRY_TOKEN : null,\n DELAY_RENDER_CALLSTACK_TOKEN,\n called\n ].filter(truthy).join(\" \");\n if (environment.isClientSideRendering) {\n scope.remotion_cancelledError = getErrorStackWithMessage(Error(message));\n } else {\n cancelRenderInternal(scope, Error(message));\n }\n }, timeoutToUse)\n };\n }\n scope.remotion_renderReady = false;\n return handle;\n};\nvar delayRender = (label, options) => {\n if (typeof window === \"undefined\") {\n return Math.random();\n }\n return delayRenderInternal({\n scope: window,\n environment: getRemotionEnvironment(),\n label: label ?? null,\n options: options ?? 
{}\n });\n};\nvar continueRenderInternal = ({\n scope,\n handle,\n environment,\n logLevel\n}) => {\n if (typeof handle === \"undefined\") {\n throw new TypeError(\"The continueRender() method must be called with a parameter that is the return value of delayRender(). No value was passed.\");\n }\n if (typeof handle !== \"number\") {\n throw new TypeError(\"The parameter passed into continueRender() must be the return value of delayRender() which is a number. Got: \" + JSON.stringify(handle));\n }\n scope.remotion_delayRenderHandles = scope.remotion_delayRenderHandles.filter((h) => {\n if (h === handle) {\n if (environment.isRendering && scope !== undefined) {\n if (!scope.remotion_delayRenderTimeouts[handle]) {\n return false;\n }\n const { label, startTime, timeout } = scope.remotion_delayRenderTimeouts[handle];\n clearTimeout(timeout);\n const message = [\n label ? `\"${label}\"` : \"A handle\",\n DELAY_RENDER_CLEAR_TOKEN,\n `${Date.now() - startTime}ms`\n ].filter(truthy).join(\" \");\n Log.verbose({ logLevel, tag: \"delayRender()\" }, message);\n delete scope.remotion_delayRenderTimeouts[handle];\n }\n return false;\n }\n return true;\n });\n if (scope.remotion_delayRenderHandles.length === 0) {\n scope.remotion_renderReady = true;\n }\n};\nvar continueRender = (handle) => {\n if (typeof window === \"undefined\") {\n return;\n }\n continueRenderInternal({\n scope: window,\n handle,\n environment: getRemotionEnvironment(),\n logLevel: window.remotion_logLevel ?? 
\"info\"\n });\n};\n\n// src/log-level-context.tsx\nimport { createContext as createContext4 } from \"react\";\nimport * as React2 from \"react\";\nvar LogLevelContext = createContext4({\n logLevel: \"info\",\n mountTime: 0\n});\nvar useLogLevel = () => {\n const { logLevel } = React2.useContext(LogLevelContext);\n if (logLevel === null) {\n throw new Error(\"useLogLevel must be used within a LogLevelProvider\");\n }\n return logLevel;\n};\nvar useMountTime = () => {\n const { mountTime } = React2.useContext(LogLevelContext);\n if (mountTime === null) {\n throw new Error(\"useMountTime must be used within a LogLevelProvider\");\n }\n return mountTime;\n};\n\n// src/use-remotion-environment.ts\nimport { useContext as useContext3, useState } from \"react\";\n\n// src/remotion-environment-context.ts\nimport React3 from \"react\";\nvar RemotionEnvironmentContext = React3.createContext(null);\n\n// src/use-remotion-environment.ts\nvar useRemotionEnvironment = () => {\n const context = useContext3(RemotionEnvironmentContext);\n const [env] = useState(() => getRemotionEnvironment());\n return context ?? env;\n};\n\n// src/use-delay-render.tsx\nvar DelayRenderContextType = createContext5(null);\nvar useDelayRender = () => {\n const environment = useRemotionEnvironment();\n const scope = useContext4(DelayRenderContextType) ?? (typeof window !== \"undefined\" ? window : undefined);\n const logLevel = useLogLevel();\n const delayRender2 = useCallback((label, options) => {\n if (!scope) {\n return Math.random();\n }\n return delayRenderInternal({\n scope,\n environment,\n label: label ?? null,\n options: options ?? {}\n });\n }, [environment, scope]);\n const continueRender2 = useCallback((handle) => {\n if (!scope) {\n return;\n }\n continueRenderInternal({\n scope,\n handle,\n environment,\n logLevel\n });\n }, [environment, logLevel, scope]);\n const cancelRender2 = useCallback((err) => {\n return cancelRenderInternal(scope ?? (typeof window !== \"undefined\" ? 
window : undefined), err);\n }, [scope]);\n return { delayRender: delayRender2, continueRender: continueRender2, cancelRender: cancelRender2 };\n};\n\n// src/TimelineContext.tsx\nimport { jsx as jsx3 } from \"react/jsx-runtime\";\nvar SetTimelineContext = createContext6({\n setFrame: () => {\n throw new Error(\"default\");\n },\n setPlaying: () => {\n throw new Error(\"default\");\n }\n});\nvar TimelineContext = createContext6(null);\nvar AbsoluteTimeContext = createContext6(null);\nvar TimelineContextProvider = ({ children, frameState }) => {\n const [playing, setPlaying] = useState2(false);\n const imperativePlaying = useRef(false);\n const [playbackRate, setPlaybackRate] = useState2(1);\n const audioAndVideoTags = useRef([]);\n const [remotionRootId] = useState2(() => String(random(null)));\n const [_frame, setFrame] = useState2(() => getInitialFrameState());\n const frame = frameState ?? _frame;\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n if (typeof window !== \"undefined\") {\n useLayoutEffect(() => {\n window.remotion_setFrame = (f, composition, attempt) => {\n window.remotion_attempt = attempt;\n const id = delayRender2(`Setting the current frame to ${f}`);\n let asyncUpdate = true;\n setFrame((s) => {\n const currentFrame = s[composition] ?? 
window.remotion_initialFrame;\n if (currentFrame === f) {\n asyncUpdate = false;\n return s;\n }\n return {\n ...s,\n [composition]: f\n };\n });\n if (asyncUpdate) {\n requestAnimationFrame(() => continueRender2(id));\n } else {\n continueRender2(id);\n }\n };\n window.remotion_isPlayer = false;\n }, [continueRender2, delayRender2]);\n }\n const timelineContextValue = useMemo2(() => {\n return {\n frame,\n playing,\n imperativePlaying,\n rootId: remotionRootId,\n playbackRate,\n setPlaybackRate,\n audioAndVideoTags\n };\n }, [frame, playbackRate, playing, remotionRootId]);\n const setTimelineContextValue = useMemo2(() => {\n return {\n setFrame,\n setPlaying\n };\n }, []);\n return /* @__PURE__ */ jsx3(AbsoluteTimeContext.Provider, {\n value: timelineContextValue,\n children: /* @__PURE__ */ jsx3(TimelineContext.Provider, {\n value: timelineContextValue,\n children: /* @__PURE__ */ jsx3(SetTimelineContext.Provider, {\n value: setTimelineContextValue,\n children\n })\n })\n });\n};\n\n// src/use-video.ts\nimport { useContext as useContext6, useMemo as useMemo5 } from \"react\";\n\n// src/CompositionManagerContext.tsx\nimport { createContext as createContext7 } from \"react\";\nvar CompositionManager = createContext7({\n compositions: [],\n folders: [],\n currentCompositionMetadata: null,\n canvasContent: null\n});\nvar CompositionSetters = createContext7({\n registerComposition: () => {\n return;\n },\n unregisterComposition: () => {\n return;\n },\n registerFolder: () => {\n return;\n },\n unregisterFolder: () => {\n return;\n },\n setCanvasContent: () => {\n return;\n },\n updateCompositionDefaultProps: () => {\n return;\n },\n onlyRenderComposition: null\n});\n\n// src/ResolveCompositionConfig.tsx\nimport { createContext as createContext9, createRef, useContext as useContext5, useMemo as useMemo4 } from \"react\";\n\n// src/input-props-override.ts\nvar getKey = () => {\n return `remotion_inputPropsOverride` + window.location.origin;\n};\nvar 
getInputPropsOverride = () => {\n if (typeof localStorage === \"undefined\")\n return null;\n const override = localStorage.getItem(getKey());\n if (!override)\n return null;\n return JSON.parse(override);\n};\nvar setInputPropsOverride = (override) => {\n if (typeof localStorage === \"undefined\")\n return;\n if (override === null) {\n localStorage.removeItem(getKey());\n return;\n }\n localStorage.setItem(getKey(), JSON.stringify(override));\n};\n\n// src/input-props-serialization.ts\nvar DATE_TOKEN = \"remotion-date:\";\nvar FILE_TOKEN = \"remotion-file:\";\nvar serializeJSONWithSpecialTypes = ({\n data,\n indent,\n staticBase\n}) => {\n let customDateUsed = false;\n let customFileUsed = false;\n let mapUsed = false;\n let setUsed = false;\n try {\n const serializedString = JSON.stringify(data, function(key, value) {\n const item = this[key];\n if (item instanceof Date) {\n customDateUsed = true;\n return `${DATE_TOKEN}${item.toISOString()}`;\n }\n if (item instanceof Map) {\n mapUsed = true;\n return value;\n }\n if (item instanceof Set) {\n setUsed = true;\n return value;\n }\n if (typeof item === \"string\" && staticBase !== null && item.startsWith(staticBase)) {\n customFileUsed = true;\n return `${FILE_TOKEN}${item.replace(staticBase + \"/\", \"\")}`;\n }\n return value;\n }, indent);\n return { serializedString, customDateUsed, customFileUsed, mapUsed, setUsed };\n } catch (err) {\n throw new Error(\"Could not serialize the passed input props to JSON: \" + err.message);\n }\n};\nvar deserializeJSONWithSpecialTypes = (data) => {\n return JSON.parse(data, (_, value) => {\n if (typeof value === \"string\" && value.startsWith(DATE_TOKEN)) {\n return new Date(value.replace(DATE_TOKEN, \"\"));\n }\n if (typeof value === \"string\" && value.startsWith(FILE_TOKEN)) {\n return `${window.remotion_staticBase}/${value.replace(FILE_TOKEN, \"\")}`;\n }\n return value;\n });\n};\nvar serializeThenDeserialize = (props) => {\n return 
deserializeJSONWithSpecialTypes(serializeJSONWithSpecialTypes({\n data: props,\n indent: 2,\n staticBase: window.remotion_staticBase\n }).serializedString);\n};\nvar serializeThenDeserializeInStudio = (props) => {\n if (getRemotionEnvironment().isStudio) {\n return serializeThenDeserialize(props);\n }\n return props;\n};\n\n// src/config/input-props.ts\nvar didWarnSSRImport = false;\nvar warnOnceSSRImport = () => {\n if (didWarnSSRImport) {\n return;\n }\n didWarnSSRImport = true;\n console.warn(\"Called `getInputProps()` on the server. This function is not available server-side and has returned an empty object.\");\n console.warn(\"To hide this warning, don't call this function on the server:\");\n console.warn(\" typeof window === 'undefined' ? {} : getInputProps()\");\n};\nvar getInputProps = () => {\n if (typeof window === \"undefined\") {\n warnOnceSSRImport();\n return {};\n }\n if (getRemotionEnvironment().isPlayer) {\n throw new Error(\"You cannot call `getInputProps()` from a <Player>. Instead, the props are available as React props from component that you passed as `component` prop.\");\n }\n const override = getInputPropsOverride();\n if (override) {\n return override;\n }\n if (typeof window === \"undefined\" || typeof window.remotion_inputProps === \"undefined\") {\n throw new Error(\"Cannot call `getInputProps()` - window.remotion_inputProps is not set. 
This API is only available if you are in the Studio, or while you are rendering server-side.\");\n }\n const param = window.remotion_inputProps;\n if (!param) {\n return {};\n }\n const parsed = deserializeJSONWithSpecialTypes(param);\n return parsed;\n};\n\n// src/EditorProps.tsx\nimport React5, {\n createContext as createContext8,\n useCallback as useCallback2,\n useImperativeHandle,\n useMemo as useMemo3\n} from \"react\";\nimport { jsx as jsx4 } from \"react/jsx-runtime\";\nvar EditorPropsContext = createContext8({\n props: {},\n updateProps: () => {\n throw new Error(\"Not implemented\");\n },\n resetUnsaved: () => {\n throw new Error(\"Not implemented\");\n }\n});\nvar editorPropsProviderRef = React5.createRef();\nvar timeValueRef = React5.createRef();\nvar EditorPropsProvider = ({ children }) => {\n const [props, setProps] = React5.useState({});\n const updateProps = useCallback2(({\n defaultProps,\n id,\n newProps\n }) => {\n setProps((prev) => {\n return {\n ...prev,\n [id]: typeof newProps === \"function\" ? newProps(prev[id] ?? 
defaultProps) : newProps\n };\n });\n }, []);\n const resetUnsaved = useCallback2((compositionId) => {\n setProps((prev) => {\n if (prev[compositionId]) {\n const newProps = { ...prev };\n delete newProps[compositionId];\n return newProps;\n }\n return prev;\n });\n }, []);\n useImperativeHandle(editorPropsProviderRef, () => {\n return {\n getProps: () => props,\n setProps\n };\n }, [props]);\n const ctx = useMemo3(() => {\n return { props, updateProps, resetUnsaved };\n }, [props, resetUnsaved, updateProps]);\n return /* @__PURE__ */ jsx4(EditorPropsContext.Provider, {\n value: ctx,\n children\n });\n};\n\n// src/validation/validate-dimensions.ts\nfunction validateDimension(amount, nameOfProp, location) {\n if (typeof amount !== \"number\") {\n throw new Error(`The \"${nameOfProp}\" prop ${location} must be a number, but you passed a value of type ${typeof amount}`);\n }\n if (isNaN(amount)) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must not be NaN, but is NaN.`);\n }\n if (!Number.isFinite(amount)) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be finite, but is ${amount}.`);\n }\n if (amount % 1 !== 0) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be an integer, but is ${amount}.`);\n }\n if (amount <= 0) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be positive, but got ${amount}.`);\n }\n}\n\n// src/validation/validate-duration-in-frames.ts\nfunction validateDurationInFrames(durationInFrames, options) {\n const { allowFloats, component } = options;\n if (typeof durationInFrames === \"undefined\") {\n throw new Error(`The \"durationInFrames\" prop ${component} is missing.`);\n }\n if (typeof durationInFrames !== \"number\") {\n throw new Error(`The \"durationInFrames\" prop ${component} must be a number, but you passed a value of type ${typeof durationInFrames}`);\n }\n if (durationInFrames <= 0) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be 
positive, but got ${durationInFrames}.`);\n }\n if (!allowFloats && durationInFrames % 1 !== 0) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be an integer, but got ${durationInFrames}.`);\n }\n if (!Number.isFinite(durationInFrames)) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be finite, but got ${durationInFrames}.`);\n }\n}\n\n// src/validation/validate-fps.ts\nfunction validateFps(fps, location, isGif) {\n if (typeof fps !== \"number\") {\n throw new Error(`\"fps\" must be a number, but you passed a value of type ${typeof fps} ${location}`);\n }\n if (!Number.isFinite(fps)) {\n throw new Error(`\"fps\" must be a finite, but you passed ${fps} ${location}`);\n }\n if (isNaN(fps)) {\n throw new Error(`\"fps\" must not be NaN, but got ${fps} ${location}`);\n }\n if (fps <= 0) {\n throw new TypeError(`\"fps\" must be positive, but got ${fps} ${location}`);\n }\n if (isGif && fps > 50) {\n throw new TypeError(`The FPS for a GIF cannot be higher than 50. Use the --every-nth-frame option to lower the FPS: https://remotion.dev/docs/render-as-gif`);\n }\n}\n\n// src/ResolveCompositionConfig.tsx\nvar ResolveCompositionContext = createContext9(null);\nvar resolveCompositionsRef = createRef();\nvar needsResolution = (composition) => {\n return Boolean(composition.calculateMetadata);\n};\nvar PROPS_UPDATED_EXTERNALLY = \"remotion.propsUpdatedExternally\";\nvar useResolvedVideoConfig = (preferredCompositionId) => {\n const context = useContext5(ResolveCompositionContext);\n const { props: allEditorProps } = useContext5(EditorPropsContext);\n const { compositions, canvasContent, currentCompositionMetadata } = useContext5(CompositionManager);\n const currentComposition = canvasContent?.type === \"composition\" ? canvasContent.compositionId : null;\n const compositionId = preferredCompositionId ?? 
currentComposition;\n const composition = compositions.find((c) => c.id === compositionId);\n const selectedEditorProps = useMemo4(() => {\n return composition ? allEditorProps[composition.id] ?? {} : {};\n }, [allEditorProps, composition]);\n const env = useRemotionEnvironment();\n return useMemo4(() => {\n if (!composition) {\n return null;\n }\n if (currentCompositionMetadata) {\n return {\n type: \"success\",\n result: {\n ...currentCompositionMetadata,\n id: composition.id,\n defaultProps: composition.defaultProps ?? {}\n }\n };\n }\n if (!needsResolution(composition)) {\n validateDurationInFrames(composition.durationInFrames, {\n allowFloats: false,\n component: `in <Composition id=\"${composition.id}\">`\n });\n validateFps(composition.fps, `in <Composition id=\"${composition.id}\">`, false);\n validateDimension(composition.width, \"width\", `in <Composition id=\"${composition.id}\">`);\n validateDimension(composition.height, \"height\", `in <Composition id=\"${composition.id}\">`);\n return {\n type: \"success\",\n result: {\n width: composition.width,\n height: composition.height,\n fps: composition.fps,\n id: composition.id,\n durationInFrames: composition.durationInFrames,\n defaultProps: composition.defaultProps ?? {},\n props: {\n ...composition.defaultProps ?? {},\n ...selectedEditorProps ?? {},\n ...typeof window === \"undefined\" || env.isPlayer || !window.remotion_inputProps ? {} : getInputProps() ?? 
{}\n },\n defaultCodec: null,\n defaultOutName: null,\n defaultVideoImageFormat: null,\n defaultPixelFormat: null,\n defaultProResProfile: null\n }\n };\n }\n if (!context) {\n return null;\n }\n if (!context[composition.id]) {\n return null;\n }\n return context[composition.id];\n }, [\n composition,\n context,\n currentCompositionMetadata,\n selectedEditorProps,\n env.isPlayer\n ]);\n};\n\n// src/use-video.ts\nvar useVideo = () => {\n const { canvasContent, compositions, currentCompositionMetadata } = useContext6(CompositionManager);\n const selected = compositions.find((c) => {\n return canvasContent?.type === \"composition\" && c.id === canvasContent.compositionId;\n });\n const resolved = useResolvedVideoConfig(selected?.id ?? null);\n return useMemo5(() => {\n if (!resolved) {\n return null;\n }\n if (resolved.type === \"error\") {\n return null;\n }\n if (resolved.type === \"loading\") {\n return null;\n }\n if (!selected) {\n return null;\n }\n return {\n ...resolved.result,\n defaultProps: selected.defaultProps ?? {},\n id: selected.id,\n ...currentCompositionMetadata ?? {},\n component: selected.component\n };\n }, [currentCompositionMetadata, resolved, selected]);\n};\n\n// src/timeline-position-state.ts\nvar makeKey = () => {\n return `remotion.time-all`;\n};\nvar persistCurrentFrame = (time) => {\n localStorage.setItem(makeKey(), JSON.stringify(time));\n};\nvar getInitialFrameState = () => {\n const item = localStorage.getItem(makeKey()) ?? \"{}\";\n const obj = JSON.parse(item);\n return obj;\n};\nvar getFrameForComposition = (composition) => {\n const item = localStorage.getItem(makeKey()) ?? \"{}\";\n const obj = JSON.parse(item);\n if (obj[composition] !== undefined) {\n return Number(obj[composition]);\n }\n if (typeof window === \"undefined\") {\n return 0;\n }\n return window.remotion_initialFrame ?? 
0;\n};\nvar useTimelinePositionFromContext = (state) => {\n const videoConfig = useVideo();\n const env = useRemotionEnvironment();\n if (!videoConfig) {\n return typeof window === \"undefined\" ? 0 : window.remotion_initialFrame ?? 0;\n }\n const unclamped = state.frame[videoConfig.id] ?? (env.isPlayer ? 0 : getFrameForComposition(videoConfig.id));\n return Math.min(videoConfig.durationInFrames - 1, unclamped);\n};\nvar useTimelineContext = () => {\n const state = useContext7(TimelineContext);\n if (state === null) {\n throw new Error(\"TimelineContext is not available. This hook must be used inside a <Player> or the Remotion Studio.\");\n }\n return state;\n};\nvar useTimelinePosition = () => {\n const state = useTimelineContext();\n return useTimelinePositionFromContext(state);\n};\nvar useAbsoluteTimelinePosition = () => {\n const state = useContext7(AbsoluteTimeContext);\n if (state === null) {\n throw new Error(\"AbsoluteTimeContext is not available. This hook must be used inside a <Player> or the Remotion Studio.\");\n }\n return useTimelinePositionFromContext(state);\n};\nvar useTimelineSetFrame = () => {\n const { setFrame } = useContext7(SetTimelineContext);\n return setFrame;\n};\nvar usePlayingState = () => {\n const { playing, imperativePlaying } = useTimelineContext();\n const { setPlaying } = useContext7(SetTimelineContext);\n return useMemo6(() => [playing, setPlaying, imperativePlaying], [imperativePlaying, playing, setPlaying]);\n};\n\n// src/use-current-frame.ts\nimport { useContext as useContext8 } from \"react\";\n\n// src/CanUseRemotionHooks.tsx\nimport { createContext as createContext10 } from \"react\";\nimport { jsx as jsx5 } from \"react/jsx-runtime\";\nvar CanUseRemotionHooks = createContext10(false);\nvar CanUseRemotionHooksProvider = ({ children }) => {\n return /* @__PURE__ */ jsx5(CanUseRemotionHooks.Provider, {\n value: true,\n children\n });\n};\n\n// src/use-current-frame.ts\nvar useCurrentFrame = () => {\n const 
canUseRemotionHooks = useContext8(CanUseRemotionHooks);\n const env = useRemotionEnvironment();\n if (!canUseRemotionHooks) {\n if (env.isPlayer) {\n throw new Error(`useCurrentFrame can only be called inside a component that was passed to <Player>. See: https://www.remotion.dev/docs/player/examples`);\n }\n throw new Error(`useCurrentFrame() can only be called inside a component that was registered as a composition. See https://www.remotion.dev/docs/the-fundamentals#defining-compositions`);\n }\n const frame = useTimelinePosition();\n const context = useContext8(SequenceContext);\n const contextOffset = context ? context.cumulatedFrom + context.relativeFrom : 0;\n return frame - contextOffset;\n};\n\n// src/use-video-config.ts\nimport { useContext as useContext10 } from \"react\";\n\n// src/use-unsafe-video-config.ts\nimport { useContext as useContext9, useMemo as useMemo7 } from \"react\";\nvar useUnsafeVideoConfig = () => {\n const context = useContext9(SequenceContext);\n const ctxWidth = context?.width ?? null;\n const ctxHeight = context?.height ?? null;\n const ctxDuration = context?.durationInFrames ?? null;\n const video = useVideo();\n return useMemo7(() => {\n if (!video) {\n return null;\n }\n const {\n id,\n durationInFrames,\n fps,\n height,\n width,\n defaultProps,\n props,\n defaultCodec,\n defaultOutName,\n defaultVideoImageFormat,\n defaultPixelFormat,\n defaultProResProfile\n } = video;\n return {\n id,\n width: ctxWidth ?? width,\n height: ctxHeight ?? height,\n fps,\n durationInFrames: ctxDuration ?? 
durationInFrames,\n defaultProps,\n props,\n defaultCodec,\n defaultOutName,\n defaultVideoImageFormat,\n defaultPixelFormat,\n defaultProResProfile\n };\n }, [ctxDuration, ctxHeight, ctxWidth, video]);\n};\n\n// src/use-video-config.ts\nvar useVideoConfig = () => {\n const videoConfig = useUnsafeVideoConfig();\n const context = useContext10(CanUseRemotionHooks);\n const isPlayer = useIsPlayer();\n if (!videoConfig) {\n if (typeof window !== \"undefined\" && window.remotion_isPlayer || isPlayer) {\n throw new Error([\n \"No video config found. Likely reasons:\",\n \"- You are probably calling useVideoConfig() from outside the component passed to <Player />. See https://www.remotion.dev/docs/player/examples for how to set up the Player correctly.\",\n \"- You have multiple versions of Remotion installed which causes the React context to get lost.\"\n ].join(\"-\"));\n }\n throw new Error(\"No video config found. You are probably calling useVideoConfig() from a component which has not been registered as a <Composition />. 
See https://www.remotion.dev/docs/the-fundamentals#defining-compositions for more information.\");\n }\n if (!context) {\n throw new Error(\"Called useVideoConfig() outside a Remotion composition.\");\n }\n return videoConfig;\n};\n\n// src/freeze.tsx\nimport { jsx as jsx6 } from \"react/jsx-runtime\";\nvar Freeze = ({\n frame: frameToFreeze,\n children,\n active = true\n}) => {\n const frame = useCurrentFrame();\n const videoConfig = useVideoConfig();\n if (typeof frameToFreeze === \"undefined\") {\n throw new Error(`The <Freeze /> component requires a 'frame' prop, but none was passed.`);\n }\n if (typeof frameToFreeze !== \"number\") {\n throw new Error(`The 'frame' prop of <Freeze /> must be a number, but is of type ${typeof frameToFreeze}`);\n }\n if (Number.isNaN(frameToFreeze)) {\n throw new Error(`The 'frame' prop of <Freeze /> must be a real number, but it is NaN.`);\n }\n if (!Number.isFinite(frameToFreeze)) {\n throw new Error(`The 'frame' prop of <Freeze /> must be a finite number, but it is ${frameToFreeze}.`);\n }\n const isActive = useMemo8(() => {\n if (typeof active === \"boolean\") {\n return active;\n }\n if (typeof active === \"function\") {\n return active(frame);\n }\n }, [active, frame]);\n const timelineContext = useTimelineContext();\n const sequenceContext = useContext11(SequenceContext);\n const relativeFrom = sequenceContext?.relativeFrom ?? 
0;\n const timelineValue = useMemo8(() => {\n if (!isActive) {\n return timelineContext;\n }\n return {\n ...timelineContext,\n playing: false,\n imperativePlaying: {\n current: false\n },\n frame: {\n [videoConfig.id]: frameToFreeze + relativeFrom\n }\n };\n }, [isActive, timelineContext, videoConfig.id, frameToFreeze, relativeFrom]);\n const newSequenceContext = useMemo8(() => {\n if (!sequenceContext) {\n return null;\n }\n if (!isActive) {\n return sequenceContext;\n }\n return {\n ...sequenceContext,\n cumulatedFrom: 0\n };\n }, [sequenceContext, isActive]);\n return /* @__PURE__ */ jsx6(TimelineContext.Provider, {\n value: timelineValue,\n children: /* @__PURE__ */ jsx6(SequenceContext.Provider, {\n value: newSequenceContext,\n children\n })\n });\n};\n\n// src/nonce.ts\nimport { createContext as createContext11, useCallback as useCallback3, useContext as useContext12, useMemo as useMemo9, useRef as useRef2 } from \"react\";\nvar NonceContext = createContext11({\n getNonce: () => 0\n});\nvar fastRefreshNonce = 0;\ntry {\n if (typeof __webpack_module__ !== \"undefined\") {\n if (__webpack_module__.hot) {\n __webpack_module__.hot.addStatusHandler((status) => {\n if (status === \"idle\") {\n fastRefreshNonce++;\n }\n });\n }\n }\n} catch {}\nvar useNonce = () => {\n const context = useContext12(NonceContext);\n const nonce = context.getNonce();\n const nonceRef = useRef2(nonce);\n nonceRef.current = nonce;\n const history = useRef2([[fastRefreshNonce, nonce]]);\n const get = useCallback3(() => {\n if (fastRefreshNonce !== history.current[history.current.length - 1][0]) {\n history.current = [\n ...history.current,\n [fastRefreshNonce, nonceRef.current]\n ];\n }\n return history.current;\n }, [history]);\n return useMemo9(() => {\n return { get };\n }, [get]);\n};\n\n// src/PremountContext.tsx\nimport { createContext as createContext12 } from \"react\";\nvar PremountContext = createContext12({\n premountFramesRemaining: 0,\n playing: false\n});\n\n// 
src/SequenceManager.tsx\nimport React8, { useCallback as useCallback4, useMemo as useMemo10, useRef as useRef3, useState as useState3 } from \"react\";\nimport { jsx as jsx7 } from \"react/jsx-runtime\";\nvar SequenceManager = React8.createContext({\n registerSequence: () => {\n throw new Error(\"SequenceManagerContext not initialized\");\n },\n unregisterSequence: () => {\n throw new Error(\"SequenceManagerContext not initialized\");\n },\n sequences: []\n});\nvar SequenceVisibilityToggleContext = React8.createContext({\n hidden: {},\n setHidden: () => {\n throw new Error(\"SequenceVisibilityToggle not initialized\");\n }\n});\nvar VisualModeOverridesContext = React8.createContext({\n dragOverrides: {},\n setDragOverrides: () => {\n throw new Error(\"VisualModeOverridesContext not initialized\");\n },\n clearDragOverrides: () => {\n throw new Error(\"VisualModeOverridesContext not initialized\");\n },\n codeValues: {},\n setCodeValues: () => {\n throw new Error(\"VisualModeOverridesContext not initialized\");\n },\n visualModeEnabled: false\n});\nvar SequenceManagerProvider = ({ children, visualModeEnabled }) => {\n const [sequences, setSequences] = useState3([]);\n const [hidden, setHidden] = useState3({});\n const [dragOverrides, setControlOverrides] = useState3({});\n const controlOverridesRef = useRef3(dragOverrides);\n controlOverridesRef.current = dragOverrides;\n const [codeValues, setCodeValuesMapState] = useState3({});\n const setDragOverrides = useCallback4((sequenceId, key, value) => {\n setControlOverrides((prev) => ({\n ...prev,\n [sequenceId]: {\n ...prev[sequenceId],\n [key]: value\n }\n }));\n }, []);\n const clearDragOverrides = useCallback4((sequenceId) => {\n setControlOverrides((prev) => {\n if (!prev[sequenceId]) {\n return prev;\n }\n const next = { ...prev };\n delete next[sequenceId];\n return next;\n });\n }, []);\n const setCodeValues = useCallback4((sequenceId, values) => {\n setCodeValuesMapState((prev) => {\n if (prev[sequenceId] === 
values) {\n return prev;\n }\n if (values === null) {\n if (!(sequenceId in prev)) {\n return prev;\n }\n const next = { ...prev };\n delete next[sequenceId];\n return next;\n }\n return { ...prev, [sequenceId]: values };\n });\n }, []);\n const registerSequence = useCallback4((seq) => {\n setSequences((seqs) => {\n return [...seqs, seq];\n });\n }, []);\n const unregisterSequence = useCallback4((seq) => {\n setSequences((seqs) => seqs.filter((s) => s.id !== seq));\n }, []);\n const sequenceContext = useMemo10(() => {\n return {\n registerSequence,\n sequences,\n unregisterSequence\n };\n }, [registerSequence, sequences, unregisterSequence]);\n const hiddenContext = useMemo10(() => {\n return {\n hidden,\n setHidden\n };\n }, [hidden]);\n const overrideContext = useMemo10(() => {\n return {\n visualModeEnabled,\n dragOverrides,\n setDragOverrides,\n clearDragOverrides,\n codeValues,\n setCodeValues\n };\n }, [\n visualModeEnabled,\n dragOverrides,\n setDragOverrides,\n clearDragOverrides,\n codeValues,\n setCodeValues\n ]);\n return /* @__PURE__ */ jsx7(SequenceManager.Provider, {\n value: sequenceContext,\n children: /* @__PURE__ */ jsx7(SequenceVisibilityToggleContext.Provider, {\n value: hiddenContext,\n children: /* @__PURE__ */ jsx7(VisualModeOverridesContext.Provider, {\n value: overrideContext,\n children\n })\n })\n });\n};\n\n// src/v5-flag.ts\nvar ENABLE_V5_BREAKING_CHANGES = false;\n\n// src/Sequence.tsx\nimport { jsx as jsx8 } from \"react/jsx-runtime\";\nvar RegularSequenceRefForwardingFunction = ({\n from = 0,\n durationInFrames = Infinity,\n children,\n name,\n height,\n width,\n showInTimeline = true,\n controls,\n _remotionInternalLoopDisplay: loopDisplay,\n _remotionInternalStack: stack,\n _remotionInternalPremountDisplay: premountDisplay,\n _remotionInternalPostmountDisplay: postmountDisplay,\n ...other\n}, ref) => {\n const { layout = \"absolute-fill\" } = other;\n const [id] = useState4(() => String(Math.random()));\n const parentSequence = 
useContext13(SequenceContext);\n const { rootId } = useTimelineContext();\n const cumulatedFrom = parentSequence ? parentSequence.cumulatedFrom + parentSequence.relativeFrom : 0;\n const nonce = useNonce();\n if (layout !== \"absolute-fill\" && layout !== \"none\") {\n throw new TypeError(`The layout prop of <Sequence /> expects either \"absolute-fill\" or \"none\", but you passed: ${layout}`);\n }\n if (layout === \"none\" && typeof other.style !== \"undefined\") {\n throw new TypeError('If layout=\"none\", you may not pass a style.');\n }\n if (typeof durationInFrames !== \"number\") {\n throw new TypeError(`You passed to durationInFrames an argument of type ${typeof durationInFrames}, but it must be a number.`);\n }\n if (durationInFrames <= 0) {\n throw new TypeError(`durationInFrames must be positive, but got ${durationInFrames}`);\n }\n if (typeof from !== \"number\") {\n throw new TypeError(`You passed to the \"from\" props of your <Sequence> an argument of type ${typeof from}, but it must be a number.`);\n }\n if (!Number.isFinite(from)) {\n throw new TypeError(`The \"from\" prop of a sequence must be finite, but got ${from}.`);\n }\n const absoluteFrame = useTimelinePosition();\n const videoConfig = useVideoConfig();\n const parentSequenceDuration = parentSequence ? 
Math.min(parentSequence.durationInFrames - from, durationInFrames) : durationInFrames;\n const actualDurationInFrames = Math.max(0, Math.min(videoConfig.durationInFrames - from, parentSequenceDuration));\n const { registerSequence, unregisterSequence } = useContext13(SequenceManager);\n const { hidden } = useContext13(SequenceVisibilityToggleContext);\n const premounting = useMemo11(() => {\n return parentSequence?.premounting || Boolean(other._remotionInternalIsPremounting);\n }, [other._remotionInternalIsPremounting, parentSequence?.premounting]);\n const postmounting = useMemo11(() => {\n return parentSequence?.postmounting || Boolean(other._remotionInternalIsPostmounting);\n }, [other._remotionInternalIsPostmounting, parentSequence?.postmounting]);\n const contextValue = useMemo11(() => {\n return {\n cumulatedFrom,\n relativeFrom: from,\n durationInFrames: actualDurationInFrames,\n parentFrom: parentSequence?.relativeFrom ?? 0,\n id,\n height: height ?? parentSequence?.height ?? null,\n width: width ?? parentSequence?.width ?? null,\n premounting,\n postmounting,\n premountDisplay: premountDisplay ?? null,\n postmountDisplay: postmountDisplay ?? null\n };\n }, [\n cumulatedFrom,\n from,\n actualDurationInFrames,\n parentSequence,\n id,\n height,\n width,\n premounting,\n postmounting,\n premountDisplay,\n postmountDisplay\n ]);\n const timelineClipName = useMemo11(() => {\n return name ?? \"\";\n }, [name]);\n const env = useRemotionEnvironment();\n const inheritedStack = other?.stack ?? null;\n useEffect(() => {\n if (!env.isStudio) {\n return;\n }\n registerSequence({\n from,\n duration: actualDurationInFrames,\n id,\n displayName: timelineClipName,\n parent: parentSequence?.id ?? null,\n type: \"sequence\",\n rootId,\n showInTimeline,\n nonce: nonce.get(),\n loopDisplay,\n stack: stack ?? inheritedStack,\n premountDisplay: premountDisplay ?? null,\n postmountDisplay: postmountDisplay ?? null,\n controls: controls ?? 
null\n });\n return () => {\n unregisterSequence(id);\n };\n }, [\n durationInFrames,\n id,\n name,\n registerSequence,\n timelineClipName,\n unregisterSequence,\n parentSequence?.id,\n actualDurationInFrames,\n rootId,\n from,\n showInTimeline,\n nonce,\n loopDisplay,\n stack,\n premountDisplay,\n postmountDisplay,\n env.isStudio,\n inheritedStack,\n controls\n ]);\n const endThreshold = Math.ceil(cumulatedFrom + from + durationInFrames - 1);\n const content = absoluteFrame < cumulatedFrom + from ? null : absoluteFrame > endThreshold ? null : children;\n const styleIfThere = other.layout === \"none\" ? undefined : other.style;\n const defaultStyle = useMemo11(() => {\n return {\n flexDirection: undefined,\n ...width ? { width } : {},\n ...height ? { height } : {},\n ...styleIfThere ?? {}\n };\n }, [height, styleIfThere, width]);\n if (ref !== null && layout === \"none\") {\n throw new TypeError('It is not supported to pass both a `ref` and `layout=\"none\"` to <Sequence />.');\n }\n const isSequenceHidden = hidden[id] ?? false;\n if (isSequenceHidden) {\n return null;\n }\n return /* @__PURE__ */ jsx8(SequenceContext.Provider, {\n value: contextValue,\n children: content === null ? null : other.layout === \"none\" ? 
content : /* @__PURE__ */ jsx8(AbsoluteFill, {\n ref,\n style: defaultStyle,\n className: other.className,\n children: content\n })\n });\n};\nvar RegularSequence = forwardRef2(RegularSequenceRefForwardingFunction);\nvar PremountedPostmountedSequenceRefForwardingFunction = (props, ref) => {\n const parentPremountContext = useContext13(PremountContext);\n const frame = useCurrentFrame() - parentPremountContext.premountFramesRemaining;\n if (props.layout === \"none\") {\n throw new Error('`<Sequence>` with `premountFor` and `postmountFor` props does not support layout=\"none\"');\n }\n const {\n style: passedStyle,\n from = 0,\n durationInFrames = Infinity,\n premountFor = 0,\n postmountFor = 0,\n styleWhilePremounted,\n styleWhilePostmounted,\n ...otherProps\n } = props;\n const endThreshold = Math.ceil(from + durationInFrames - 1);\n const premountingActive = frame < from && frame >= from - premountFor;\n const postmountingActive = frame > endThreshold && frame <= endThreshold + postmountFor;\n const freezeFrame = premountingActive ? from : postmountingActive ? from + durationInFrames - 1 : 0;\n const isFreezingActive = premountingActive || postmountingActive;\n const style = useMemo11(() => {\n return {\n ...passedStyle,\n opacity: premountingActive || postmountingActive ? 0 : 1,\n pointerEvents: premountingActive || postmountingActive ? \"none\" : passedStyle?.pointerEvents ?? undefined,\n ...premountingActive ? styleWhilePremounted : {},\n ...postmountingActive ? styleWhilePostmounted : {}\n };\n }, [\n passedStyle,\n premountingActive,\n postmountingActive,\n styleWhilePremounted,\n styleWhilePostmounted\n ]);\n const { playing } = useTimelineContext();\n const premountFramesRemaining = premountingActive ? 
from - frame : 0;\n const premountContextValue = useMemo11(() => {\n return {\n premountFramesRemaining,\n playing: parentPremountContext.playing || playing\n };\n }, [premountFramesRemaining, parentPremountContext.playing, playing]);\n return /* @__PURE__ */ jsx8(PremountContext.Provider, {\n value: premountContextValue,\n children: /* @__PURE__ */ jsx8(Freeze, {\n frame: freezeFrame,\n active: isFreezingActive,\n children: /* @__PURE__ */ jsx8(Sequence, {\n ref,\n from,\n durationInFrames,\n style,\n _remotionInternalPremountDisplay: premountFor,\n _remotionInternalPostmountDisplay: postmountFor,\n _remotionInternalIsPremounting: premountingActive,\n _remotionInternalIsPostmounting: postmountingActive,\n ...otherProps\n })\n })\n });\n};\nvar PremountedPostmountedSequence = forwardRef2(PremountedPostmountedSequenceRefForwardingFunction);\nvar SequenceRefForwardingFunction = (props, ref) => {\n const env = useRemotionEnvironment();\n const { fps } = useVideoConfig();\n if (props.layout !== \"none\" && !env.isRendering) {\n const effectivePremountFor = ENABLE_V5_BREAKING_CHANGES ? props.premountFor ?? 
fps : props.premountFor;\n if (effectivePremountFor || props.postmountFor) {\n return /* @__PURE__ */ jsx8(PremountedPostmountedSequence, {\n ref,\n ...props,\n premountFor: effectivePremountFor\n });\n }\n }\n return /* @__PURE__ */ jsx8(RegularSequence, {\n ...props,\n ref\n });\n};\nvar Sequence = forwardRef2(SequenceRefForwardingFunction);\n// src/animated-image/AnimatedImage.tsx\nimport {\n forwardRef as forwardRef3,\n useEffect as useEffect2,\n useImperativeHandle as useImperativeHandle3,\n useLayoutEffect as useLayoutEffect2,\n useRef as useRef5,\n useState as useState5\n} from \"react\";\n\n// src/animated-image/canvas.tsx\nimport React10, { useCallback as useCallback5, useImperativeHandle as useImperativeHandle2, useRef as useRef4 } from \"react\";\nimport { jsx as jsx9 } from \"react/jsx-runtime\";\nvar calcArgs = (fit, frameSize, canvasSize) => {\n switch (fit) {\n case \"fill\": {\n return [\n 0,\n 0,\n frameSize.width,\n frameSize.height,\n 0,\n 0,\n canvasSize.width,\n canvasSize.height\n ];\n }\n case \"contain\": {\n const ratio = Math.min(canvasSize.width / frameSize.width, canvasSize.height / frameSize.height);\n const centerX = (canvasSize.width - frameSize.width * ratio) / 2;\n const centerY = (canvasSize.height - frameSize.height * ratio) / 2;\n return [\n 0,\n 0,\n frameSize.width,\n frameSize.height,\n centerX,\n centerY,\n frameSize.width * ratio,\n frameSize.height * ratio\n ];\n }\n case \"cover\": {\n const ratio = Math.max(canvasSize.width / frameSize.width, canvasSize.height / frameSize.height);\n const centerX = (canvasSize.width - frameSize.width * ratio) / 2;\n const centerY = (canvasSize.height - frameSize.height * ratio) / 2;\n return [\n 0,\n 0,\n frameSize.width,\n frameSize.height,\n centerX,\n centerY,\n frameSize.width * ratio,\n frameSize.height * ratio\n ];\n }\n default:\n throw new Error(\"Unknown fit: \" + fit);\n }\n};\nvar CanvasRefForwardingFunction = ({ width, height, fit, className, style }, ref) => {\n const 
canvasRef = useRef4(null);\n const draw = useCallback5((imageData) => {\n const canvas = canvasRef.current;\n const canvasWidth = width ?? imageData.displayWidth;\n const canvasHeight = height ?? imageData.displayHeight;\n if (!canvas) {\n throw new Error(\"Canvas ref is not set\");\n }\n const ctx = canvasRef.current?.getContext(\"2d\");\n if (!ctx) {\n throw new Error(\"Could not get 2d context\");\n }\n canvas.width = canvasWidth;\n canvas.height = canvasHeight;\n ctx.drawImage(imageData, ...calcArgs(fit, {\n height: imageData.displayHeight,\n width: imageData.displayWidth\n }, {\n width: canvasWidth,\n height: canvasHeight\n }));\n }, [fit, height, width]);\n useImperativeHandle2(ref, () => {\n return {\n draw,\n getCanvas: () => {\n if (!canvasRef.current) {\n throw new Error(\"Canvas ref is not set\");\n }\n return canvasRef.current;\n },\n clear: () => {\n const ctx = canvasRef.current?.getContext(\"2d\");\n if (!ctx) {\n throw new Error(\"Could not get 2d context\");\n }\n ctx.clearRect(0, 0, canvasRef.current.width, canvasRef.current.height);\n }\n };\n }, [draw]);\n return /* @__PURE__ */ jsx9(\"canvas\", {\n ref: canvasRef,\n className,\n style\n });\n};\nvar Canvas = React10.forwardRef(CanvasRefForwardingFunction);\n\n// src/animated-image/decode-image.ts\nvar CACHE_SIZE = 5;\nvar getActualTime = ({\n loopBehavior,\n durationFound,\n timeInSec\n}) => {\n return loopBehavior === \"loop\" ? durationFound ? 
timeInSec % durationFound : timeInSec : Math.min(timeInSec, durationFound || Infinity);\n};\nvar decodeImage = async ({\n resolvedSrc,\n signal,\n currentTime,\n initialLoopBehavior\n}) => {\n if (typeof ImageDecoder === \"undefined\") {\n throw new Error(\"Your browser does not support the WebCodecs ImageDecoder API.\");\n }\n const res = await fetch(resolvedSrc, { signal });\n const { body } = res;\n if (!body) {\n throw new Error(\"Got no body\");\n }\n const decoder = new ImageDecoder({\n data: body,\n type: res.headers.get(\"Content-Type\") || \"image/gif\"\n });\n await decoder.completed;\n const { selectedTrack } = decoder.tracks;\n if (!selectedTrack) {\n throw new Error(\"No selected track\");\n }\n const cache = [];\n let durationFound = null;\n const getFrameByIndex = async (frameIndex) => {\n const foundInCache = cache.find((c) => c.frameIndex === frameIndex);\n if (foundInCache && foundInCache.frame) {\n return foundInCache;\n }\n const frame = await decoder.decode({\n frameIndex,\n completeFramesOnly: true\n });\n if (foundInCache) {\n foundInCache.frame = frame.image;\n } else {\n cache.push({\n frame: frame.image,\n frameIndex,\n timeInSeconds: frame.image.timestamp / 1e6\n });\n }\n return {\n frame: frame.image,\n frameIndex,\n timeInSeconds: frame.image.timestamp / 1e6\n };\n };\n const clearCache = (closeToTimeInSec) => {\n const itemsInCache = cache.filter((c) => c.frame);\n const sortByClosestToCurrentTime = itemsInCache.sort((a, b) => {\n const aDiff = Math.abs(a.timeInSeconds - closeToTimeInSec);\n const bDiff = Math.abs(b.timeInSeconds - closeToTimeInSec);\n return aDiff - bDiff;\n });\n for (let i = 0;i < sortByClosestToCurrentTime.length; i++) {\n if (i < CACHE_SIZE) {\n continue;\n }\n const item = sortByClosestToCurrentTime[i];\n item.frame = null;\n }\n };\n const ensureFrameBeforeAndAfter = async ({\n timeInSec,\n loopBehavior\n }) => {\n const actualTimeInSec = getActualTime({\n durationFound,\n loopBehavior,\n timeInSec\n });\n 
const framesBefore = cache.filter((c) => c.timeInSeconds <= actualTimeInSec);\n const biggestIndex = framesBefore.map((c) => c.frameIndex).reduce((a, b) => Math.max(a, b), 0);\n let i = biggestIndex;\n while (true) {\n const f = await getFrameByIndex(i);\n i++;\n if (!f.frame) {\n throw new Error(\"No frame found\");\n }\n if (!f.frame.duration) {\n break;\n }\n if (i === selectedTrack.frameCount && durationFound === null) {\n const duration = (f.frame.timestamp + f.frame.duration) / 1e6;\n durationFound = duration;\n }\n if (f.timeInSeconds > actualTimeInSec || i === selectedTrack.frameCount) {\n break;\n }\n }\n if (selectedTrack.frameCount - biggestIndex < 3 && loopBehavior === \"loop\") {\n await getFrameByIndex(0);\n }\n clearCache(actualTimeInSec);\n };\n await ensureFrameBeforeAndAfter({\n timeInSec: currentTime,\n loopBehavior: initialLoopBehavior\n });\n await ensureFrameBeforeAndAfter({\n timeInSec: currentTime,\n loopBehavior: initialLoopBehavior\n });\n const getFrame = async (timeInSec, loopBehavior) => {\n if (durationFound !== null && timeInSec > durationFound && loopBehavior === \"clear-after-finish\") {\n return null;\n }\n const actualTimeInSec = getActualTime({\n loopBehavior,\n durationFound,\n timeInSec\n });\n await ensureFrameBeforeAndAfter({ timeInSec: actualTimeInSec, loopBehavior });\n const itemsInCache = cache.filter((c) => c.frame);\n const closest = itemsInCache.reduce((a, b) => {\n const aDiff = Math.abs(a.timeInSeconds - actualTimeInSec);\n const bDiff = Math.abs(b.timeInSeconds - actualTimeInSec);\n return aDiff < bDiff ? 
a : b;\n });\n if (!closest.frame) {\n throw new Error(\"No frame found\");\n }\n return closest;\n };\n return {\n getFrame,\n frameCount: selectedTrack.frameCount\n };\n};\n\n// src/animated-image/resolve-image-source.tsx\nvar resolveAnimatedImageSource = (src) => {\n if (typeof window === \"undefined\") {\n return src;\n }\n return new URL(src, window.origin).href;\n};\n\n// src/animated-image/AnimatedImage.tsx\nimport { jsx as jsx10 } from \"react/jsx-runtime\";\nvar AnimatedImage = forwardRef3(({\n src,\n width,\n height,\n onError,\n loopBehavior = \"loop\",\n playbackRate = 1,\n fit = \"fill\",\n ...props\n}, canvasRef) => {\n const mountState = useRef5({ isMounted: true });\n useEffect2(() => {\n const { current } = mountState;\n current.isMounted = true;\n return () => {\n current.isMounted = false;\n };\n }, []);\n const resolvedSrc = resolveAnimatedImageSource(src);\n const [imageDecoder, setImageDecoder] = useState5(null);\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n const [decodeHandle] = useState5(() => delayRender2(`Rendering <AnimatedImage/> with src=\"${resolvedSrc}\"`));\n const frame = useCurrentFrame();\n const { fps } = useVideoConfig();\n const currentTime = frame / playbackRate / fps;\n const currentTimeRef = useRef5(currentTime);\n currentTimeRef.current = currentTime;\n const ref = useRef5(null);\n useImperativeHandle3(canvasRef, () => {\n const c = ref.current?.getCanvas();\n if (!c) {\n throw new Error(\"Canvas ref is not set\");\n }\n return c;\n }, []);\n const [initialLoopBehavior] = useState5(() => loopBehavior);\n useEffect2(() => {\n const controller = new AbortController;\n decodeImage({\n resolvedSrc,\n signal: controller.signal,\n currentTime: currentTimeRef.current,\n initialLoopBehavior\n }).then((d) => {\n setImageDecoder(d);\n continueRender2(decodeHandle);\n }).catch((err) => {\n if (err.name === \"AbortError\") {\n continueRender2(decodeHandle);\n return;\n }\n if (onError) 
{\n onError?.(err);\n continueRender2(decodeHandle);\n } else {\n cancelRender(err);\n }\n });\n return () => {\n controller.abort();\n };\n }, [\n resolvedSrc,\n decodeHandle,\n onError,\n initialLoopBehavior,\n continueRender2\n ]);\n useLayoutEffect2(() => {\n if (!imageDecoder) {\n return;\n }\n const delay = delayRender2(`Rendering frame at ${currentTime} of <AnimatedImage src=\"${src}\"/>`);\n imageDecoder.getFrame(currentTime, loopBehavior).then((videoFrame) => {\n if (mountState.current.isMounted) {\n if (videoFrame === null) {\n ref.current?.clear();\n } else {\n ref.current?.draw(videoFrame.frame);\n }\n }\n continueRender2(delay);\n }).catch((err) => {\n if (onError) {\n onError(err);\n continueRender2(delay);\n } else {\n cancelRender(err);\n }\n });\n }, [\n currentTime,\n imageDecoder,\n loopBehavior,\n onError,\n src,\n continueRender2,\n delayRender2\n ]);\n return /* @__PURE__ */ jsx10(Canvas, {\n ref,\n width,\n height,\n fit,\n ...props\n });\n});\n// src/Artifact.tsx\nimport { useContext as useContext14, useLayoutEffect as useLayoutEffect4, useState as useState7 } from \"react\";\n\n// src/RenderAssetManager.tsx\nimport {\n createContext as createContext13,\n useCallback as useCallback6,\n useImperativeHandle as useImperativeHandle4,\n useLayoutEffect as useLayoutEffect3,\n useMemo as useMemo12,\n useRef as useRef6,\n useState as useState6\n} from \"react\";\n\n// src/validation/validate-artifact.ts\nvar validateArtifactFilename = (filename) => {\n if (typeof filename !== \"string\") {\n throw new TypeError(`The \"filename\" must be a string, but you passed a value of type ${typeof filename}`);\n }\n if (filename.trim() === \"\") {\n throw new Error(\"The `filename` must not be empty\");\n }\n if (!filename.match(/^([0-9a-zA-Z-!_.*'()/:&$@=;+,?]+)/g)) {\n throw new Error('The `filename` must match \"/^([0-9a-zA-Z-!_.*\\'()/:&$@=;+,?]+)/g\". 
Use forward slashes only, even on Windows.');\n }\n};\nvar validateContent = (content) => {\n if (typeof content !== \"string\" && !(content instanceof Uint8Array)) {\n throw new TypeError(`The \"content\" must be a string or Uint8Array, but you passed a value of type ${typeof content}`);\n }\n if (typeof content === \"string\" && content.trim() === \"\") {\n throw new Error(\"The `content` must not be empty\");\n }\n};\nvar validateRenderAsset = (artifact) => {\n if (artifact.type !== \"artifact\") {\n return;\n }\n validateArtifactFilename(artifact.filename);\n if (artifact.contentType === \"thumbnail\") {\n return;\n }\n validateContent(artifact.content);\n};\n\n// src/RenderAssetManager.tsx\nimport { jsx as jsx11 } from \"react/jsx-runtime\";\nvar RenderAssetManager = createContext13({\n registerRenderAsset: () => {\n return;\n },\n unregisterRenderAsset: () => {\n return;\n },\n renderAssets: []\n});\nvar RenderAssetManagerProvider = ({ children, collectAssets }) => {\n const [renderAssets, setRenderAssets] = useState6([]);\n const renderAssetsRef = useRef6([]);\n const registerRenderAsset = useCallback6((renderAsset) => {\n validateRenderAsset(renderAsset);\n renderAssetsRef.current = [...renderAssetsRef.current, renderAsset];\n setRenderAssets(renderAssetsRef.current);\n }, []);\n if (collectAssets) {\n useImperativeHandle4(collectAssets, () => {\n return {\n collectAssets: () => {\n const assets = renderAssetsRef.current;\n renderAssetsRef.current = [];\n setRenderAssets([]);\n return assets;\n }\n };\n }, []);\n }\n const unregisterRenderAsset = useCallback6((id) => {\n renderAssetsRef.current = renderAssetsRef.current.filter((a) => a.id !== id);\n setRenderAssets(renderAssetsRef.current);\n }, []);\n useLayoutEffect3(() => {\n if (typeof window !== \"undefined\") {\n window.remotion_collectAssets = () => {\n const assets = renderAssetsRef.current;\n renderAssetsRef.current = [];\n setRenderAssets([]);\n return assets;\n };\n }\n }, []);\n const 
contextValue = useMemo12(() => {\n return {\n registerRenderAsset,\n unregisterRenderAsset,\n renderAssets\n };\n }, [renderAssets, registerRenderAsset, unregisterRenderAsset]);\n return /* @__PURE__ */ jsx11(RenderAssetManager.Provider, {\n value: contextValue,\n children\n });\n};\n\n// src/Artifact.tsx\nvar ArtifactThumbnail = Symbol(\"Thumbnail\");\nvar Artifact = ({ filename, content, downloadBehavior }) => {\n const { registerRenderAsset, unregisterRenderAsset } = useContext14(RenderAssetManager);\n const env = useRemotionEnvironment();\n const frame = useCurrentFrame();\n const [id] = useState7(() => {\n return String(Math.random());\n });\n useLayoutEffect4(() => {\n if (!env.isRendering) {\n return;\n }\n if (content instanceof Uint8Array) {\n registerRenderAsset({\n type: \"artifact\",\n id,\n content: btoa(new TextDecoder(\"utf8\").decode(content)),\n filename,\n frame,\n contentType: \"binary\",\n downloadBehavior: downloadBehavior ?? null\n });\n } else if (content === ArtifactThumbnail) {\n registerRenderAsset({\n type: \"artifact\",\n id,\n filename,\n frame,\n contentType: \"thumbnail\",\n downloadBehavior: downloadBehavior ?? null\n });\n } else {\n registerRenderAsset({\n type: \"artifact\",\n id,\n content,\n filename,\n frame,\n contentType: \"text\",\n downloadBehavior: downloadBehavior ?? 
null\n });\n }\n return () => {\n return unregisterRenderAsset(id);\n };\n }, [\n content,\n env.isRendering,\n filename,\n frame,\n id,\n registerRenderAsset,\n unregisterRenderAsset,\n downloadBehavior\n ]);\n return null;\n};\nArtifact.Thumbnail = ArtifactThumbnail;\n// src/audio/Audio.tsx\nimport { forwardRef as forwardRef6, useCallback as useCallback11, useContext as useContext26 } from \"react\";\n\n// src/absolute-src.ts\nvar getAbsoluteSrc = (relativeSrc) => {\n if (typeof window === \"undefined\") {\n return relativeSrc;\n }\n if (relativeSrc.startsWith(\"http://\") || relativeSrc.startsWith(\"https://\") || relativeSrc.startsWith(\"file://\") || relativeSrc.startsWith(\"blob:\") || relativeSrc.startsWith(\"data:\")) {\n return relativeSrc;\n }\n return new URL(relativeSrc, window.origin).href;\n};\n\n// src/calculate-media-duration.ts\nvar calculateMediaDuration = ({\n trimAfter,\n mediaDurationInFrames,\n playbackRate,\n trimBefore\n}) => {\n let duration = mediaDurationInFrames;\n if (typeof trimAfter !== \"undefined\") {\n duration = trimAfter;\n }\n if (typeof trimBefore !== \"undefined\") {\n duration -= trimBefore;\n }\n const actualDuration = duration / playbackRate;\n return Math.floor(actualDuration);\n};\n\n// src/loop/index.tsx\nimport React11, { createContext as createContext14, useMemo as useMemo13 } from \"react\";\nimport { jsx as jsx12 } from \"react/jsx-runtime\";\nvar LoopContext = createContext14(null);\nvar useLoop = () => {\n return React11.useContext(LoopContext);\n};\nvar Loop = ({ durationInFrames, times = Infinity, children, name, ...props }) => {\n const currentFrame = useCurrentFrame();\n const { durationInFrames: compDuration } = useVideoConfig();\n validateDurationInFrames(durationInFrames, {\n component: \"of the <Loop /> component\",\n allowFloats: true\n });\n if (typeof times !== \"number\") {\n throw new TypeError(`You passed to \"times\" an argument of type ${typeof times}, but it must be a number.`);\n }\n if (times !== 
Infinity && times % 1 !== 0) {\n throw new TypeError(`The \"times\" prop of a loop must be an integer, but got ${times}.`);\n }\n if (times < 0) {\n throw new TypeError(`The \"times\" prop of a loop must be at least 0, but got ${times}`);\n }\n const maxTimes = Math.ceil(compDuration / durationInFrames);\n const actualTimes = Math.min(maxTimes, times);\n const style = props.layout === \"none\" ? undefined : props.style;\n const maxFrame = durationInFrames * (actualTimes - 1);\n const iteration = Math.floor(currentFrame / durationInFrames);\n const start = iteration * durationInFrames;\n const from = Math.min(start, maxFrame);\n const loopDisplay = useMemo13(() => {\n return {\n numberOfTimes: Math.min(compDuration / durationInFrames, times),\n startOffset: -from,\n durationInFrames\n };\n }, [compDuration, durationInFrames, from, times]);\n const loopContext = useMemo13(() => {\n return {\n iteration: Math.floor(currentFrame / durationInFrames),\n durationInFrames\n };\n }, [currentFrame, durationInFrames]);\n return /* @__PURE__ */ jsx12(LoopContext.Provider, {\n value: loopContext,\n children: /* @__PURE__ */ jsx12(Sequence, {\n durationInFrames,\n from,\n name: name ?? \"<Loop>\",\n _remotionInternalLoopDisplay: loopDisplay,\n layout: props.layout,\n style,\n children\n })\n });\n};\nLoop.useLoop = useLoop;\n\n// src/prefetch.ts\nimport { useContext as useContext15 } from \"react\";\n\n// src/playback-logging.ts\nvar playbackLogging = ({\n logLevel,\n tag,\n message,\n mountTime\n}) => {\n const tags = [mountTime ? 
Date.now() - mountTime + \"ms \" : null, tag].filter(Boolean).join(\" \");\n Log.trace({ logLevel, tag: null }, `[${tags}]`, message);\n};\n\n// src/prefetch-state.tsx\nimport { createContext as createContext15, useEffect as useEffect3, useState as useState8 } from \"react\";\nimport { jsx as jsx13 } from \"react/jsx-runtime\";\nvar PreloadContext = createContext15({});\nvar preloads = {};\nvar updaters = [];\nvar setPreloads = (updater) => {\n preloads = updater(preloads);\n updaters.forEach((u) => u());\n};\nvar PrefetchProvider = ({ children }) => {\n const [_preloads, _setPreloads] = useState8(() => preloads);\n useEffect3(() => {\n const updaterFunction = () => {\n _setPreloads(preloads);\n };\n updaters.push(updaterFunction);\n return () => {\n updaters = updaters.filter((u) => u !== updaterFunction);\n };\n }, []);\n return /* @__PURE__ */ jsx13(PreloadContext.Provider, {\n value: _preloads,\n children\n });\n};\n\n// src/prefetch.ts\nvar removeAndGetHashFragment = (src) => {\n const hashIndex = src.indexOf(\"#\");\n if (hashIndex === -1) {\n return null;\n }\n return hashIndex;\n};\nvar getSrcWithoutHash = (src) => {\n const hashIndex = removeAndGetHashFragment(src);\n if (hashIndex === null) {\n return src;\n }\n return src.slice(0, hashIndex);\n};\nvar usePreload = (src) => {\n const preloads2 = useContext15(PreloadContext);\n const hashFragmentIndex = removeAndGetHashFragment(src);\n const withoutHashFragment = getSrcWithoutHash(src);\n if (!preloads2[withoutHashFragment]) {\n return src;\n }\n if (hashFragmentIndex !== null) {\n return preloads2[withoutHashFragment] + src.slice(hashFragmentIndex);\n }\n return preloads2[withoutHashFragment];\n};\nvar blobToBase64 = function(blob) {\n const reader = new FileReader;\n return new Promise((resolve, reject) => {\n reader.onload = function() {\n const dataUrl = reader.result;\n resolve(dataUrl);\n };\n reader.onerror = (err) => {\n return reject(err);\n };\n reader.readAsDataURL(blob);\n });\n};\nvar 
getBlobFromReader = async ({\n reader,\n contentType,\n contentLength,\n onProgress\n}) => {\n let receivedLength = 0;\n const chunks = [];\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n break;\n }\n chunks.push(value);\n receivedLength += value.length;\n if (onProgress) {\n onProgress({ loadedBytes: receivedLength, totalBytes: contentLength });\n }\n }\n const chunksAll = new Uint8Array(receivedLength);\n let position = 0;\n for (const chunk of chunks) {\n chunksAll.set(chunk, position);\n position += chunk.length;\n }\n return new Blob([chunksAll], {\n type: contentType ?? undefined\n });\n};\nvar prefetch = (src, options) => {\n const method = options?.method ?? \"blob-url\";\n const logLevel = options?.logLevel ?? \"info\";\n const srcWithoutHash = getSrcWithoutHash(src);\n if (getRemotionEnvironment().isRendering) {\n return {\n free: () => {\n return;\n },\n waitUntilDone: () => Promise.resolve(srcWithoutHash)\n };\n }\n Log.verbose({ logLevel, tag: \"prefetch\" }, `Starting prefetch ${srcWithoutHash}`);\n let canceled = false;\n let objectUrl = null;\n let resolve = () => {\n return;\n };\n let reject = () => {\n return;\n };\n const waitUntilDone = new Promise((res, rej) => {\n resolve = res;\n reject = rej;\n });\n const controller = new AbortController;\n let canBeAborted = true;\n fetch(srcWithoutHash, {\n signal: controller.signal,\n credentials: options?.credentials ?? undefined\n }).then((res) => {\n canBeAborted = false;\n if (canceled) {\n return null;\n }\n if (!res.ok) {\n throw new Error(`HTTP error, status = ${res.status}`);\n }\n const headerContentType = res.headers.get(\"Content-Type\");\n const contentType = options?.contentType ?? 
headerContentType;\n const hasProperContentType = contentType && (contentType.startsWith(\"video/\") || contentType.startsWith(\"audio/\") || contentType.startsWith(\"image/\"));\n if (!hasProperContentType) {\n console.warn(`Called prefetch() on ${srcWithoutHash} which returned a \"Content-Type\" of ${headerContentType}. Prefetched content should have a proper content type (video/... or audio/...) or a contentType passed the options of prefetch(). Otherwise, prefetching will not work properly in all browsers.`);\n }\n if (!res.body) {\n throw new Error(`HTTP response of ${srcWithoutHash} has no body`);\n }\n const reader = res.body.getReader();\n return getBlobFromReader({\n reader,\n contentType: options?.contentType ?? headerContentType ?? null,\n contentLength: res.headers.get(\"Content-Length\") ? parseInt(res.headers.get(\"Content-Length\"), 10) : null,\n onProgress: options?.onProgress\n });\n }).then((buf) => {\n if (!buf) {\n return;\n }\n const actualBlob = options?.contentType ? 
new Blob([buf], { type: options.contentType }) : buf;\n if (method === \"base64\") {\n return blobToBase64(actualBlob);\n }\n return URL.createObjectURL(actualBlob);\n }).then((url) => {\n if (canceled) {\n return;\n }\n playbackLogging({\n logLevel,\n tag: \"prefetch\",\n message: `Finished prefetch ${srcWithoutHash} with method ${method}`,\n mountTime: null\n });\n objectUrl = url;\n setPreloads((p) => ({\n ...p,\n [srcWithoutHash]: objectUrl\n }));\n resolve(objectUrl);\n }).catch((err) => {\n if (err?.message.includes(\"free() called\")) {\n return;\n }\n reject(err);\n });\n return {\n free: () => {\n playbackLogging({\n logLevel,\n tag: \"prefetch\",\n message: `Freeing ${srcWithoutHash}`,\n mountTime: null\n });\n if (objectUrl) {\n if (method === \"blob-url\") {\n URL.revokeObjectURL(objectUrl);\n }\n setPreloads((p) => {\n const copy = { ...p };\n delete copy[srcWithoutHash];\n return copy;\n });\n } else {\n canceled = true;\n if (canBeAborted) {\n try {\n controller.abort(new Error(\"free() called\"));\n } catch {}\n }\n }\n },\n waitUntilDone: () => {\n return waitUntilDone;\n }\n };\n};\n\n// src/validate-media-props.ts\nvar validateMediaProps = (props, component) => {\n if (typeof props.volume !== \"number\" && typeof props.volume !== \"function\" && typeof props.volume !== \"undefined\") {\n throw new TypeError(`You have passed a volume of type ${typeof props.volume} to your <${component} /> component. Volume must be a number or a function with the signature '(frame: number) => number' undefined.`);\n }\n if (typeof props.volume === \"number\" && props.volume < 0) {\n throw new TypeError(`You have passed a volume below 0 to your <${component} /> component. Volume must be between 0 and 1`);\n }\n if (typeof props.playbackRate !== \"number\" && typeof props.playbackRate !== \"undefined\") {\n throw new TypeError(`You have passed a playbackRate of type ${typeof props.playbackRate} to your <${component} /> component. 
Playback rate must a real number or undefined.`);\n }\n if (typeof props.playbackRate === \"number\" && (isNaN(props.playbackRate) || !Number.isFinite(props.playbackRate) || props.playbackRate <= 0)) {\n throw new TypeError(`You have passed a playbackRate of ${props.playbackRate} to your <${component} /> component. Playback rate must be a real number above 0.`);\n }\n};\n\n// src/validate-start-from-props.ts\nvar validateStartFromProps = (startFrom, endAt) => {\n if (typeof startFrom !== \"undefined\") {\n if (typeof startFrom !== \"number\") {\n throw new TypeError(`type of startFrom prop must be a number, instead got type ${typeof startFrom}.`);\n }\n if (isNaN(startFrom) || startFrom === Infinity) {\n throw new TypeError(\"startFrom prop can not be NaN or Infinity.\");\n }\n if (startFrom < 0) {\n throw new TypeError(`startFrom must be greater than equal to 0 instead got ${startFrom}.`);\n }\n }\n if (typeof endAt !== \"undefined\") {\n if (typeof endAt !== \"number\") {\n throw new TypeError(`type of endAt prop must be a number, instead got type ${typeof endAt}.`);\n }\n if (isNaN(endAt)) {\n throw new TypeError(\"endAt prop can not be NaN.\");\n }\n if (endAt <= 0) {\n throw new TypeError(`endAt must be a positive number, instead got ${endAt}.`);\n }\n }\n if (endAt < startFrom) {\n throw new TypeError(\"endAt prop must be greater than startFrom prop.\");\n }\n};\nvar validateTrimProps = (trimBefore, trimAfter) => {\n if (typeof trimBefore !== \"undefined\") {\n if (typeof trimBefore !== \"number\") {\n throw new TypeError(`type of trimBefore prop must be a number, instead got type ${typeof trimBefore}.`);\n }\n if (isNaN(trimBefore) || trimBefore === Infinity) {\n throw new TypeError(\"trimBefore prop can not be NaN or Infinity.\");\n }\n if (trimBefore < 0) {\n throw new TypeError(`trimBefore must be greater than equal to 0 instead got ${trimBefore}.`);\n }\n }\n if (typeof trimAfter !== \"undefined\") {\n if (typeof trimAfter !== \"number\") {\n throw new 
TypeError(`type of trimAfter prop must be a number, instead got type ${typeof trimAfter}.`);\n }\n if (isNaN(trimAfter)) {\n throw new TypeError(\"trimAfter prop can not be NaN.\");\n }\n if (trimAfter <= 0) {\n throw new TypeError(`trimAfter must be a positive number, instead got ${trimAfter}.`);\n }\n }\n if (trimAfter <= trimBefore) {\n throw new TypeError(\"trimAfter prop must be greater than trimBefore prop.\");\n }\n};\nvar validateMediaTrimProps = ({\n startFrom,\n endAt,\n trimBefore,\n trimAfter\n}) => {\n if (typeof startFrom !== \"undefined\" && typeof trimBefore !== \"undefined\") {\n throw new TypeError(\"Cannot use both startFrom and trimBefore props. Use trimBefore instead as startFrom is deprecated.\");\n }\n if (typeof endAt !== \"undefined\" && typeof trimAfter !== \"undefined\") {\n throw new TypeError(\"Cannot use both endAt and trimAfter props. Use trimAfter instead as endAt is deprecated.\");\n }\n const hasNewProps = typeof trimBefore !== \"undefined\" || typeof trimAfter !== \"undefined\";\n const hasOldProps = typeof startFrom !== \"undefined\" || typeof endAt !== \"undefined\";\n if (hasNewProps) {\n validateTrimProps(trimBefore, trimAfter);\n } else if (hasOldProps) {\n validateStartFromProps(startFrom, endAt);\n }\n};\nvar resolveTrimProps = ({\n startFrom,\n endAt,\n trimBefore,\n trimAfter\n}) => {\n const trimBeforeValue = trimBefore ?? startFrom ?? undefined;\n const trimAfterValue = trimAfter ?? endAt ?? 
undefined;\n return { trimBeforeValue, trimAfterValue };\n};\n\n// src/video/duration-state.tsx\nimport { createContext as createContext16, useMemo as useMemo14, useReducer } from \"react\";\nimport { jsx as jsx14 } from \"react/jsx-runtime\";\nvar durationReducer = (state, action) => {\n switch (action.type) {\n case \"got-duration\": {\n const absoluteSrc = getAbsoluteSrc(action.src);\n if (state[absoluteSrc] === action.durationInSeconds) {\n return state;\n }\n return {\n ...state,\n [absoluteSrc]: action.durationInSeconds\n };\n }\n default:\n return state;\n }\n};\nvar DurationsContext = createContext16({\n durations: {},\n setDurations: () => {\n throw new Error(\"context missing\");\n }\n});\nvar DurationsContextProvider = ({ children }) => {\n const [durations, setDurations] = useReducer(durationReducer, {});\n const value = useMemo14(() => {\n return {\n durations,\n setDurations\n };\n }, [durations]);\n return /* @__PURE__ */ jsx14(DurationsContext.Provider, {\n value,\n children\n });\n};\n\n// src/audio/AudioForPreview.tsx\nimport React17, {\n forwardRef as forwardRef4,\n useContext as useContext24,\n useEffect as useEffect10,\n useImperativeHandle as useImperativeHandle5,\n useMemo as useMemo22,\n useRef as useRef14,\n useState as useState13\n} from \"react\";\n\n// src/get-cross-origin-value.ts\nvar getCrossOriginValue = ({\n crossOrigin,\n requestsVideoFrame,\n isClientSideRendering\n}) => {\n if (crossOrigin !== undefined && crossOrigin !== null) {\n return crossOrigin;\n }\n if (isClientSideRendering) {\n return \"anonymous\";\n }\n if (requestsVideoFrame) {\n return \"anonymous\";\n }\n return;\n};\n\n// src/use-amplification.ts\nimport { useContext as useContext17, useLayoutEffect as useLayoutEffect5, useRef as useRef9 } from \"react\";\n\n// src/audio/shared-audio-tags.tsx\nimport React14, {\n createContext as createContext17,\n createRef as createRef2,\n useCallback as useCallback7,\n useContext as useContext16,\n useMemo as useMemo16,\n 
useRef as useRef7,\n useState as useState9\n} from \"react\";\n\n// src/play-and-handle-not-allowed-error.ts\nvar playAndHandleNotAllowedError = ({\n mediaRef,\n mediaType,\n onAutoPlayError,\n logLevel,\n mountTime,\n reason,\n isPlayer\n}) => {\n const { current } = mediaRef;\n if (!current) {\n return;\n }\n playbackLogging({\n logLevel,\n tag: \"play\",\n message: `Attempting to play ${current.src}. Reason: ${reason}`,\n mountTime\n });\n const prom = current.play();\n if (!prom.catch) {\n return;\n }\n prom.catch((err) => {\n if (!current) {\n return;\n }\n if (err.message.includes(\"request was interrupted by a call to pause\")) {\n return;\n }\n if (err.message.includes(\"The operation was aborted.\")) {\n return;\n }\n if (err.message.includes(\"The fetching process for the media resource was aborted by the user agent\")) {\n return;\n }\n if (err.message.includes(\"request was interrupted by a new load request\")) {\n return;\n }\n if (err.message.includes(\"because the media was removed from the document\")) {\n return;\n }\n if (err.message.includes(\"user didn't interact with the document\") && current.muted) {\n return;\n }\n console.log(`Could not play ${mediaType} due to following error: `, err);\n if (!current.muted) {\n if (onAutoPlayError) {\n onAutoPlayError();\n return;\n }\n if (mediaType === \"video\" && isPlayer) {\n Log.info({ logLevel, tag: \"<\" + mediaType + \">\" }, `The video will be muted and we'll retry playing it.`);\n Log.info({ logLevel, tag: \"<\" + mediaType + \">\" }, \"Use onAutoPlayError() to handle this error yourself.\");\n current.muted = true;\n current.play();\n }\n }\n });\n};\n\n// src/audio/shared-element-source-node.ts\nvar makeSharedElementSourceNode = ({\n audioContext,\n ref\n}) => {\n let connected = null;\n let disposed = false;\n return {\n attemptToConnect: () => {\n if (disposed) {\n throw new Error(\"SharedElementSourceNode has been disposed\");\n }\n if (!connected && ref.current) {\n const 
mediaElementSourceNode = audioContext.createMediaElementSource(ref.current);\n connected = mediaElementSourceNode;\n }\n },\n get: () => {\n if (!connected) {\n throw new Error(\"Audio element not connected\");\n }\n return connected;\n },\n cleanup: () => {\n if (connected) {\n connected.disconnect();\n connected = null;\n }\n disposed = true;\n }\n };\n};\n\n// src/audio/use-audio-context.ts\nimport { useMemo as useMemo15 } from \"react\";\nvar warned = false;\nvar warnOnce = (logLevel) => {\n if (warned) {\n return;\n }\n warned = true;\n if (typeof window !== \"undefined\") {\n Log.warn({ logLevel, tag: null }, \"AudioContext is not supported in this browser\");\n }\n};\nvar useSingletonAudioContext = ({\n logLevel,\n latencyHint,\n audioEnabled\n}) => {\n const env = useRemotionEnvironment();\n const audioContext = useMemo15(() => {\n if (env.isRendering) {\n return null;\n }\n if (!audioEnabled) {\n return null;\n }\n if (typeof AudioContext === \"undefined\") {\n warnOnce(logLevel);\n return null;\n }\n return new AudioContext({\n latencyHint,\n sampleRate: 48000\n });\n }, [logLevel, latencyHint, env.isRendering, audioEnabled]);\n return audioContext;\n};\n\n// src/audio/shared-audio-tags.tsx\nimport { jsx as jsx15, jsxs } from \"react/jsx-runtime\";\nvar EMPTY_AUDIO = \"data:audio/mp3;base64,/+MYxAAJcAV8AAgAABn//////+/gQ5BAMA+D4Pg+BAQBAEAwD4Pg+D4EBAEAQDAPg++hYBH///hUFQVBUFREDQNHmf///////+MYxBUGkAGIMAAAAP/29Xt6lUxBTUUzLjEwMFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV/+MYxDUAAANIAAAAAFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV\";\nvar compareProps = (obj1, obj2) => {\n const keysA = Object.keys(obj1).sort();\n const keysB = Object.keys(obj2).sort();\n if (keysA.length !== keysB.length) {\n return false;\n }\n for (let i = 0;i < keysA.length; i++) {\n if (keysA[i] !== keysB[i]) {\n return false;\n }\n if (obj1[keysA[i]] !== obj2[keysB[i]]) {\n return false;\n }\n }\n return true;\n};\nvar didPropChange = 
(key, newProp, prevProp) => {\n if (key === \"src\" && !prevProp.startsWith(\"data:\") && !newProp.startsWith(\"data:\")) {\n return new URL(prevProp, window.origin).toString() !== new URL(newProp, window.origin).toString();\n }\n if (prevProp === newProp) {\n return false;\n }\n return true;\n};\nvar SharedAudioContext = createContext17(null);\nvar SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyHint, audioEnabled }) => {\n const audios = useRef7([]);\n const [initialNumberOfAudioTags] = useState9(numberOfAudioTags);\n if (numberOfAudioTags !== initialNumberOfAudioTags) {\n throw new Error(\"The number of shared audio tags has changed dynamically. Once you have set this property, you cannot change it afterwards.\");\n }\n const logLevel = useLogLevel();\n const audioContext = useSingletonAudioContext({\n logLevel,\n latencyHint: audioLatencyHint,\n audioEnabled\n });\n const audioSyncAnchor = useMemo16(() => ({ value: 0 }), []);\n const prevEndTimes = useRef7({ scheduledEndTime: null, mediaEndTime: null });\n const scheduleAudioNode = useMemo16(() => {\n return ({\n node,\n mediaTimestamp,\n targetTime,\n currentTime,\n sequenceEndTime,\n sequenceStartTime,\n debugAudioScheduling\n }) => {\n if (!audioContext) {\n throw new Error(\"Audio context not found\");\n }\n const bufferDuration = node.buffer?.duration ?? 0;\n const unclampedMediaEndTime = mediaTimestamp + bufferDuration;\n const needsTrimEnd = unclampedMediaEndTime > sequenceEndTime;\n const needsTrimStart = mediaTimestamp < sequenceStartTime;\n const offsetBecauseOfTrim = needsTrimStart ? sequenceStartTime - mediaTimestamp : 0;\n const offsetBecauseOfTooLate = targetTime < 0 ? -targetTime : 0;\n const offset = offsetBecauseOfTrim + offsetBecauseOfTooLate;\n const duration = needsTrimEnd ? 
bufferDuration - Math.max(0, unclampedMediaEndTime - sequenceEndTime) - offset : bufferDuration - offset;\n const scheduledTime = targetTime + currentTime + offset;\n if (offset < 0) {\n throw new Error(\"offset < 0: \" + JSON.stringify({\n offset,\n targetTime,\n currentTime,\n offsetBecauseOfTrim,\n offsetBecauseOfTooLate\n }));\n }\n if (duration > 0) {\n node.start(scheduledTime, offset, duration);\n }\n const scheduledEndTime = scheduledTime + duration / node.playbackRate.value;\n const mediaTime = mediaTimestamp + offset;\n const mediaEndTime = mediaTime + duration;\n const latency = audioContext.baseLatency + audioContext.outputLatency;\n const timeDiff = scheduledTime - currentTime - latency;\n const prev = prevEndTimes.current;\n const scheduledMismatch = prev.scheduledEndTime !== null && Math.abs(scheduledTime - prev.scheduledEndTime) > 0.001;\n const mediaMismatch = prev.mediaEndTime !== null && Math.abs(mediaTime - prev.mediaEndTime) > 0.001;\n if (debugAudioScheduling) {\n Log.info({ logLevel, tag: \"audio-scheduling\" }, \"scheduled %c%s%c %s %c%s%c %s %c%s%c %s %s %s\", scheduledMismatch ? \"color: red; font-weight: bold\" : \"\", scheduledTime.toFixed(4), \"\", scheduledEndTime.toFixed(4), mediaMismatch ? \"color: red; font-weight: bold\" : \"\", mediaTime.toFixed(4), \"\", mediaEndTime.toFixed(4), duration < 0 ? \"color: red; font-weight: bold\" : timeDiff < 0 ? \"color: red; font-weight: bold\" : \"color: blue; font-weight: bold\", duration < 0 ? \"missed \" + Math.abs(offset).toFixed(2) + \"s\" : Math.abs(timeDiff).toFixed(2) + (timeDiff < 0 ? \" delay\" : \" ahead\"), \"\", \"current=\" + currentTime.toFixed(4), \"offset=\" + offset.toFixed(4), \"latency=\" + latency.toFixed(4), \"state=\" + audioContext.state);\n }\n prev.scheduledEndTime = scheduledEndTime;\n prev.mediaEndTime = mediaEndTime;\n return duration > 0 ? 
{\n type: \"started\",\n scheduledTime\n } : {\n type: \"not-started\"\n };\n };\n }, [audioContext, logLevel]);\n const refs = useMemo16(() => {\n return new Array(numberOfAudioTags).fill(true).map(() => {\n const ref = createRef2();\n return {\n id: Math.random(),\n ref,\n mediaElementSourceNode: audioContext ? makeSharedElementSourceNode({\n audioContext,\n ref\n }) : null\n };\n });\n }, [audioContext, numberOfAudioTags]);\n const effectToUse = React14.useInsertionEffect ?? React14.useLayoutEffect;\n effectToUse(() => {\n return () => {\n requestAnimationFrame(() => {\n refs.forEach(({ mediaElementSourceNode }) => {\n mediaElementSourceNode?.cleanup();\n });\n });\n };\n }, [refs]);\n const takenAudios = useRef7(new Array(numberOfAudioTags).fill(false));\n const rerenderAudios = useCallback7(() => {\n refs.forEach(({ ref, id }) => {\n const data = audios.current?.find((a) => a.id === id);\n const { current } = ref;\n if (!current) {\n return;\n }\n if (data === undefined) {\n current.src = EMPTY_AUDIO;\n return;\n }\n if (!data) {\n throw new TypeError(\"Expected audio data to be there\");\n }\n Object.keys(data.props).forEach((key) => {\n if (didPropChange(key, data.props[key], current[key])) {\n current[key] = data.props[key];\n }\n });\n });\n }, [refs]);\n const registerAudio = useCallback7((options) => {\n const { aud, audioId, premounting, postmounting } = options;\n const found = audios.current?.find((a) => a.audioId === audioId);\n if (found) {\n return found;\n }\n const firstFreeAudio = takenAudios.current.findIndex((a) => a === false);\n if (firstFreeAudio === -1) {\n throw new Error(`Tried to simultaneously mount ${numberOfAudioTags + 1} <Html5Audio /> tags at the same time. With the current settings, the maximum amount of <Html5Audio /> tags is limited to ${numberOfAudioTags} at the same time. Remotion pre-mounts silent audio tags to help avoid browser autoplay restrictions. 
See https://remotion.dev/docs/player/autoplay#using-the-numberofsharedaudiotags-prop for more information on how to increase this limit.`);\n }\n const { id, ref, mediaElementSourceNode } = refs[firstFreeAudio];\n const cloned = [...takenAudios.current];\n cloned[firstFreeAudio] = id;\n takenAudios.current = cloned;\n const newElem = {\n props: aud,\n id,\n el: ref,\n audioId,\n mediaElementSourceNode,\n premounting,\n audioMounted: Boolean(ref.current),\n postmounting,\n cleanupOnMediaTagUnmount: () => {}\n };\n audios.current?.push(newElem);\n rerenderAudios();\n return newElem;\n }, [numberOfAudioTags, refs, rerenderAudios]);\n const unregisterAudio = useCallback7((id) => {\n const cloned = [...takenAudios.current];\n const index = refs.findIndex((r) => r.id === id);\n if (index === -1) {\n throw new TypeError(\"Error occured in \");\n }\n cloned[index] = false;\n takenAudios.current = cloned;\n audios.current = audios.current?.filter((a) => a.id !== id);\n rerenderAudios();\n }, [refs, rerenderAudios]);\n const updateAudio = useCallback7(({\n aud,\n audioId,\n id,\n premounting,\n postmounting\n }) => {\n let changed = false;\n audios.current = audios.current?.map((prevA) => {\n const audioMounted = Boolean(prevA.el.current);\n if (prevA.audioMounted !== audioMounted) {\n changed = true;\n }\n if (prevA.id === id) {\n const isTheSame = compareProps(aud, prevA.props) && prevA.premounting === premounting && prevA.postmounting === postmounting;\n if (isTheSame) {\n return prevA;\n }\n changed = true;\n return {\n ...prevA,\n props: aud,\n premounting,\n postmounting,\n audioId,\n audioMounted\n };\n }\n return prevA;\n });\n if (changed) {\n rerenderAudios();\n }\n }, [rerenderAudios]);\n const mountTime = useMountTime();\n const env = useRemotionEnvironment();\n const playAllAudios = useCallback7(() => {\n refs.forEach((ref) => {\n const audio = audios.current.find((a) => a.el === ref.ref);\n if (audio?.premounting) {\n return;\n }\n 
playAndHandleNotAllowedError({\n mediaRef: ref.ref,\n mediaType: \"audio\",\n onAutoPlayError: null,\n logLevel,\n mountTime,\n reason: \"playing all audios\",\n isPlayer: env.isPlayer\n });\n });\n audioContext?.resume();\n }, [audioContext, logLevel, mountTime, refs, env.isPlayer]);\n const value = useMemo16(() => {\n return {\n registerAudio,\n unregisterAudio,\n updateAudio,\n playAllAudios,\n numberOfAudioTags,\n audioContext,\n audioSyncAnchor,\n scheduleAudioNode\n };\n }, [\n numberOfAudioTags,\n playAllAudios,\n registerAudio,\n unregisterAudio,\n updateAudio,\n audioContext,\n audioSyncAnchor,\n scheduleAudioNode\n ]);\n return /* @__PURE__ */ jsxs(SharedAudioContext.Provider, {\n value,\n children: [\n refs.map(({ id, ref }) => {\n return /* @__PURE__ */ jsx15(\"audio\", {\n ref,\n preload: \"metadata\",\n src: EMPTY_AUDIO\n }, id);\n }),\n children\n ]\n });\n};\nvar useSharedAudio = ({\n aud,\n audioId,\n premounting,\n postmounting\n}) => {\n const ctx = useContext16(SharedAudioContext);\n const [elem] = useState9(() => {\n if (ctx && ctx.numberOfAudioTags > 0) {\n return ctx.registerAudio({ aud, audioId, premounting, postmounting });\n }\n const el = React14.createRef();\n const mediaElementSourceNode = ctx?.audioContext ? makeSharedElementSourceNode({\n audioContext: ctx.audioContext,\n ref: el\n }) : null;\n return {\n el,\n id: Math.random(),\n props: aud,\n audioId,\n mediaElementSourceNode,\n premounting,\n audioMounted: Boolean(el.current),\n postmounting,\n cleanupOnMediaTagUnmount: () => {\n mediaElementSourceNode?.cleanup();\n }\n };\n });\n const effectToUse = React14.useInsertionEffect ?? 
React14.useLayoutEffect;\n if (typeof document !== \"undefined\") {\n effectToUse(() => {\n if (ctx && ctx.numberOfAudioTags > 0) {\n ctx.updateAudio({ id: elem.id, aud, audioId, premounting, postmounting });\n }\n }, [aud, ctx, elem.id, audioId, premounting, postmounting]);\n effectToUse(() => {\n return () => {\n if (ctx && ctx.numberOfAudioTags > 0) {\n ctx.unregisterAudio(elem.id);\n }\n };\n }, [ctx, elem.id]);\n }\n return elem;\n};\n\n// src/is-approximately-the-same.ts\nvar FLOATING_POINT_ERROR_THRESHOLD = 0.00001;\nvar isApproximatelyTheSame = (num1, num2) => {\n return Math.abs(num1 - num2) < FLOATING_POINT_ERROR_THRESHOLD;\n};\n\n// src/video/video-fragment.ts\nimport { useRef as useRef8 } from \"react\";\nvar toSeconds = (time, fps) => {\n return Math.round(time / fps * 100) / 100;\n};\nvar isSafari = () => {\n if (typeof window === \"undefined\") {\n return false;\n }\n const isAppleWebKit = /AppleWebKit/.test(window.navigator.userAgent);\n if (!isAppleWebKit) {\n return false;\n }\n const isNotChrome = !window.navigator.userAgent.includes(\"Chrome/\");\n return isNotChrome;\n};\nvar isIosSafari = () => {\n if (typeof window === \"undefined\") {\n return false;\n }\n const isIpadIPodIPhone = /iP(ad|od|hone)/i.test(window.navigator.userAgent);\n return isIpadIPodIPhone && isSafari();\n};\nvar isIOSSafariAndBlob = (actualSrc) => {\n return isIosSafari() && actualSrc.startsWith(\"blob:\");\n};\nvar getVideoFragmentStart = ({\n actualFrom,\n fps\n}) => {\n return toSeconds(Math.max(0, -actualFrom), fps);\n};\nvar getVideoFragmentEnd = ({\n duration,\n fps\n}) => {\n return toSeconds(duration, fps);\n};\nvar appendVideoFragment = ({\n actualSrc,\n actualFrom,\n duration,\n fps\n}) => {\n if (isIOSSafariAndBlob(actualSrc)) {\n return actualSrc;\n }\n if (actualSrc.startsWith(\"data:\")) {\n return actualSrc;\n }\n const existingHash = Boolean(new URL(actualSrc, (typeof window === \"undefined\" ? null : window.location.href) ?? 
\"http://localhost:3000\").hash);\n if (existingHash) {\n return actualSrc;\n }\n if (!Number.isFinite(actualFrom)) {\n return actualSrc;\n }\n const withStartHash = `${actualSrc}#t=${getVideoFragmentStart({ actualFrom, fps })}`;\n if (!Number.isFinite(duration)) {\n return withStartHash;\n }\n return `${withStartHash},${getVideoFragmentEnd({ duration, fps })}`;\n};\nvar isSubsetOfDuration = ({\n prevStartFrom,\n newStartFrom,\n prevDuration,\n newDuration,\n fps\n}) => {\n const previousFrom = getVideoFragmentStart({ actualFrom: prevStartFrom, fps });\n const newFrom = getVideoFragmentStart({ actualFrom: newStartFrom, fps });\n const previousEnd = getVideoFragmentEnd({ duration: prevDuration, fps });\n const newEnd = getVideoFragmentEnd({ duration: newDuration, fps });\n if (newFrom < previousFrom) {\n return false;\n }\n if (newEnd > previousEnd) {\n return false;\n }\n return true;\n};\nvar useAppendVideoFragment = ({\n actualSrc: initialActualSrc,\n actualFrom: initialActualFrom,\n duration: initialDuration,\n fps\n}) => {\n const actualFromRef = useRef8(initialActualFrom);\n const actualDuration = useRef8(initialDuration);\n const actualSrc = useRef8(initialActualSrc);\n if (!isSubsetOfDuration({\n prevStartFrom: actualFromRef.current,\n newStartFrom: initialActualFrom,\n prevDuration: actualDuration.current,\n newDuration: initialDuration,\n fps\n }) || initialActualSrc !== actualSrc.current) {\n actualFromRef.current = initialActualFrom;\n actualDuration.current = initialDuration;\n actualSrc.current = initialActualSrc;\n }\n const appended = appendVideoFragment({\n actualSrc: actualSrc.current,\n actualFrom: actualFromRef.current,\n duration: actualDuration.current,\n fps\n });\n return appended;\n};\n\n// src/use-amplification.ts\nvar warned2 = false;\nvar warnSafariOnce = (logLevel) => {\n if (warned2) {\n return;\n }\n warned2 = true;\n Log.warn({ logLevel, tag: null }, \"In Safari, setting a volume and a playback rate at the same time is buggy.\");\n 
Log.warn({ logLevel, tag: null }, \"In Desktop Safari, only volumes <= 1 will be applied.\");\n Log.warn({ logLevel, tag: null }, logLevel, \"In Mobile Safari, the volume will be ignored and set to 1 if a playbackRate is set.\");\n};\nvar useVolume = ({\n mediaRef,\n volume,\n logLevel,\n source,\n shouldUseWebAudioApi\n}) => {\n const audioStuffRef = useRef9(null);\n const currentVolumeRef = useRef9(volume);\n currentVolumeRef.current = volume;\n const sharedAudioContext = useContext17(SharedAudioContext);\n if (!sharedAudioContext) {\n throw new Error(\"useAmplification must be used within a SharedAudioContext\");\n }\n const { audioContext } = sharedAudioContext;\n if (typeof window !== \"undefined\") {\n useLayoutEffect5(() => {\n if (!audioContext) {\n return;\n }\n if (!mediaRef.current) {\n return;\n }\n if (!shouldUseWebAudioApi) {\n return;\n }\n if (mediaRef.current.playbackRate !== 1 && isSafari()) {\n warnSafariOnce(logLevel);\n return;\n }\n if (!source) {\n return;\n }\n const gainNode = new GainNode(audioContext, {\n gain: currentVolumeRef.current\n });\n source.attemptToConnect();\n source.get().connect(gainNode);\n gainNode.connect(audioContext.destination);\n audioStuffRef.current = {\n gainNode\n };\n Log.trace({ logLevel, tag: null }, `Starting to amplify ${mediaRef.current?.src}. 
Gain = ${currentVolumeRef.current}, playbackRate = ${mediaRef.current?.playbackRate}`);\n return () => {\n audioStuffRef.current = null;\n gainNode.disconnect();\n source.get().disconnect();\n };\n }, [logLevel, mediaRef, audioContext, source, shouldUseWebAudioApi]);\n }\n if (audioStuffRef.current) {\n const valueToSet = volume;\n if (!isApproximatelyTheSame(audioStuffRef.current.gainNode.gain.value, valueToSet)) {\n audioStuffRef.current.gainNode.gain.value = valueToSet;\n Log.trace({ logLevel, tag: null }, `Setting gain to ${valueToSet} for ${mediaRef.current?.src}`);\n }\n }\n const safariCase = isSafari() && mediaRef.current && mediaRef.current?.playbackRate !== 1;\n const shouldUseTraditionalVolume = safariCase || !shouldUseWebAudioApi;\n if (shouldUseTraditionalVolume && mediaRef.current && !isApproximatelyTheSame(volume, mediaRef.current?.volume)) {\n mediaRef.current.volume = Math.min(volume, 1);\n }\n return audioStuffRef;\n};\n\n// src/use-media-in-timeline.ts\nimport { useContext as useContext19, useEffect as useEffect4, useMemo as useMemo17, useState as useState10 } from \"react\";\n\n// src/audio/use-audio-frame.ts\nimport { useContext as useContext18 } from \"react\";\nvar useMediaStartsAt = () => {\n const parentSequence = useContext18(SequenceContext);\n const startsAt = Math.min(0, parentSequence?.relativeFrom ?? 
0);\n return startsAt;\n};\nvar useFrameForVolumeProp = (behavior) => {\n const loop = Loop.useLoop();\n const frame = useCurrentFrame();\n const startsAt = useMediaStartsAt();\n if (behavior === \"repeat\" || loop === null) {\n return frame + startsAt;\n }\n return frame + startsAt + loop.durationInFrames * loop.iteration;\n};\n\n// src/get-asset-file-name.ts\nvar getAssetDisplayName = (filename) => {\n if (/data:|blob:/.test(filename.substring(0, 5))) {\n return \"Data URL\";\n }\n const splitted = filename.split(\"/\").map((s) => s.split(\"\\\\\")).flat(1);\n return splitted[splitted.length - 1];\n};\n\n// src/volume-prop.ts\nvar evaluateVolume = ({\n frame,\n volume,\n mediaVolume = 1\n}) => {\n if (typeof volume === \"number\") {\n return volume * mediaVolume;\n }\n if (typeof volume === \"undefined\") {\n return Number(mediaVolume);\n }\n const evaluated = volume(frame) * mediaVolume;\n if (typeof evaluated !== \"number\") {\n throw new TypeError(`You passed in a a function to the volume prop but it did not return a number but a value of type ${typeof evaluated} for frame ${frame}`);\n }\n if (Number.isNaN(evaluated)) {\n throw new TypeError(`You passed in a function to the volume prop but it returned NaN for frame ${frame}.`);\n }\n if (!Number.isFinite(evaluated)) {\n throw new TypeError(`You passed in a function to the volume prop but it returned a non-finite number for frame ${frame}.`);\n }\n return Math.max(0, evaluated);\n};\n\n// src/use-media-in-timeline.ts\nvar didWarn = {};\nvar warnOnce2 = (message) => {\n if (didWarn[message]) {\n return;\n }\n console.warn(message);\n didWarn[message] = true;\n};\nvar useBasicMediaInTimeline = ({\n volume,\n mediaVolume,\n mediaType,\n src,\n displayName,\n trimBefore,\n trimAfter,\n playbackRate\n}) => {\n if (!src) {\n throw new Error(\"No src passed\");\n }\n const startsAt = useMediaStartsAt();\n const parentSequence = useContext19(SequenceContext);\n const videoConfig = useVideoConfig();\n const 
[initialVolume] = useState10(() => volume);\n const mediaDuration = calculateMediaDuration({\n mediaDurationInFrames: videoConfig.durationInFrames,\n playbackRate,\n trimBefore,\n trimAfter\n });\n const duration = parentSequence ? Math.min(parentSequence.durationInFrames, mediaDuration) : mediaDuration;\n const volumes = useMemo17(() => {\n if (typeof volume === \"number\") {\n return volume;\n }\n return new Array(Math.floor(Math.max(0, duration + startsAt))).fill(true).map((_, i) => {\n return evaluateVolume({\n frame: i + startsAt,\n volume,\n mediaVolume\n });\n }).join(\",\");\n }, [duration, startsAt, volume, mediaVolume]);\n useEffect4(() => {\n if (typeof volume === \"number\" && volume !== initialVolume) {\n warnOnce2(`Remotion: The ${mediaType} with src ${src} has changed it's volume. Prefer the callback syntax for setting volume to get better timeline display: https://www.remotion.dev/docs/audio/volume`);\n }\n }, [initialVolume, mediaType, src, volume]);\n const doesVolumeChange = typeof volume === \"function\";\n const nonce = useNonce();\n const { rootId } = useTimelineContext();\n const env = useRemotionEnvironment();\n return {\n volumes,\n duration,\n doesVolumeChange,\n nonce,\n rootId,\n isStudio: env.isStudio,\n finalDisplayName: displayName ?? 
getAssetDisplayName(src)\n };\n};\nvar useMediaInTimeline = ({\n volume,\n mediaVolume,\n src,\n mediaType,\n playbackRate,\n displayName,\n id,\n stack,\n showInTimeline,\n premountDisplay,\n postmountDisplay,\n loopDisplay\n}) => {\n const parentSequence = useContext19(SequenceContext);\n const startsAt = useMediaStartsAt();\n const { registerSequence, unregisterSequence } = useContext19(SequenceManager);\n const {\n volumes,\n duration,\n doesVolumeChange,\n nonce,\n rootId,\n isStudio,\n finalDisplayName\n } = useBasicMediaInTimeline({\n volume,\n mediaVolume,\n mediaType,\n src,\n displayName,\n trimAfter: undefined,\n trimBefore: undefined,\n playbackRate\n });\n useEffect4(() => {\n if (!src) {\n throw new Error(\"No src passed\");\n }\n if (!isStudio && window.process?.env?.NODE_ENV !== \"test\") {\n return;\n }\n if (!showInTimeline) {\n return;\n }\n registerSequence({\n type: mediaType,\n src,\n id,\n duration,\n from: 0,\n parent: parentSequence?.id ?? null,\n displayName: finalDisplayName,\n rootId,\n volume: volumes,\n showInTimeline: true,\n nonce: nonce.get(),\n startMediaFrom: 0 - startsAt,\n doesVolumeChange,\n loopDisplay,\n playbackRate,\n stack,\n premountDisplay,\n postmountDisplay,\n controls: null\n });\n return () => {\n unregisterSequence(id);\n };\n }, [\n duration,\n id,\n parentSequence,\n src,\n registerSequence,\n unregisterSequence,\n volumes,\n doesVolumeChange,\n nonce,\n mediaType,\n startsAt,\n playbackRate,\n stack,\n showInTimeline,\n premountDisplay,\n postmountDisplay,\n isStudio,\n loopDisplay,\n rootId,\n finalDisplayName\n ]);\n};\n\n// src/use-media-playback.ts\nimport {\n useCallback as useCallback10,\n useContext as useContext22,\n useEffect as useEffect8,\n useLayoutEffect as useLayoutEffect7,\n useRef as useRef13\n} from \"react\";\n\n// src/buffer-until-first-frame.ts\nimport { useCallback as useCallback9, useMemo as useMemo20, useRef as useRef11 } from \"react\";\n\n// src/use-buffer-state.ts\nimport { useContext as 
// src/use-buffer-state.ts
import { useContext as useContext21, useMemo as useMemo19 } from "react";

// src/buffering.tsx
import React15, {
  useCallback as useCallback8,
  useContext as useContext20,
  useEffect as useEffect5,
  useLayoutEffect as useLayoutEffect6,
  useMemo as useMemo18,
  useRef as useRef10,
  useState as useState11
} from "react";
import { jsx as jsx16 } from "react/jsx-runtime";

// Central registry of "buffer blocks". While at least one block is
// registered, the player is considered to be buffering. Consumers can
// subscribe to enter/exit-buffer events via listenForBuffering/listenForResume.
var useBufferManager = (logLevel, mountTime) => {
  const [blocks, setBlocks] = useState11([]);
  const [onBufferingCallbacks, setOnBufferingCallbacks] = useState11([]);
  const [onResumeCallbacks, setOnResumeCallbacks] = useState11([]);
  const env = useRemotionEnvironment();
  const rendering = env.isRendering;
  // NOTE(review): this ref is read by useIsPlayerBuffering and
  // useMediaPlayback but is not written in this module — confirm it is
  // updated elsewhere (e.g. by the Player's event wiring).
  const buffering = useRef10(false);
  // Registers a buffer block; returns an unblock handle that removes it.
  // During rendering, buffering is irrelevant, so this is a no-op.
  const addBlock = useCallback8((block) => {
    if (rendering) {
      return {
        unblock: () => {
          return;
        }
      };
    }
    setBlocks((b) => [...b, block]);
    return {
      unblock: () => {
        setBlocks((b) => {
          const newArr = b.filter((bx) => bx !== block);
          // Keep the same array identity if nothing was removed to avoid a
          // needless re-render (unblock may be called more than once).
          if (newArr.length === b.length) {
            return b;
          }
          return newArr;
        });
      }
    };
  }, [rendering]);
  const listenForBuffering = useCallback8((callback) => {
    setOnBufferingCallbacks((c) => [...c, callback]);
    return {
      remove: () => {
        setOnBufferingCallbacks((c) => c.filter((cb) => cb !== callback));
      }
    };
  }, []);
  const listenForResume = useCallback8((callback) => {
    setOnResumeCallbacks((c) => [...c, callback]);
    return {
      remove: () => {
        setOnResumeCallbacks((c) => c.filter((cb) => cb !== callback));
      }
    };
  }, []);
  // Fire "entered buffer state" when the first block appears.
  useEffect5(() => {
    if (rendering) {
      return;
    }
    if (blocks.length > 0) {
      onBufferingCallbacks.forEach((c) => c());
      playbackLogging({
        logLevel,
        message: "Player is entering buffer state",
        mountTime,
        tag: "player"
      });
    }
  }, [blocks]);
  // `typeof window` is constant for the lifetime of the app, so calling the
  // hook conditionally is stable across renders (SSR guard for
  // useLayoutEffect).
  if (typeof window !== "undefined") {
    useLayoutEffect6(() => {
      if (rendering) {
        return;
      }
      if (blocks.length === 0) {
        onResumeCallbacks.forEach((c) => c());
        playbackLogging({
          logLevel,
          message: "Player is exiting buffer state",
          mountTime,
          tag: "player"
        });
      }
    }, [blocks]);
  }
  return useMemo18(() => {
    return { addBlock, listenForBuffering, listenForResume, buffering };
  }, [addBlock, buffering, listenForBuffering, listenForResume]);
};
var BufferingContextReact = React15.createContext(null);
var BufferingProvider = ({ children }) => {
  const { logLevel, mountTime } = useContext20(LogLevelContext);
  const bufferManager = useBufferManager(logLevel ?? "info", mountTime);
  return /* @__PURE__ */ jsx16(BufferingContextReact.Provider, {
    value: bufferManager,
    children
  });
};
// Subscribes to the buffer manager and mirrors its state into React state.
var useIsPlayerBuffering = (bufferManager) => {
  const [isBuffering, setIsBuffering] = useState11(bufferManager.buffering.current);
  useEffect5(() => {
    const onBuffer = () => {
      setIsBuffering(true);
    };
    const onResume = () => {
      setIsBuffering(false);
    };
    // FIX: keep the subscription handles and call .remove() on cleanup.
    // Previously the cleanup registered brand-new no-op listeners instead of
    // removing onBuffer/onResume, leaking listeners and causing setState on
    // an unmounted component.
    const bufferListener = bufferManager.listenForBuffering(onBuffer);
    const resumeListener = bufferManager.listenForResume(onResume);
    return () => {
      bufferListener.remove();
      resumeListener.remove();
    };
  }, [bufferManager]);
  return isBuffering;
};

// src/use-buffer-state.ts
// Public API: lets a component delay playback while it loads something.
var useBufferState = () => {
  const buffer = useContext21(BufferingContextReact);
  const addBlock = buffer ? buffer.addBlock : null;
  return useMemo19(() => ({
    delayPlayback: () => {
      if (!addBlock) {
        throw new Error("Tried to enable the buffering state, but a Remotion context was not found. This API can only be called in a component that was passed to the Remotion Player or a <Composition>. Or you might have experienced a version mismatch - run `npx remotion versions` and ensure all packages have the same version. This error is thrown by the buffer state https://remotion.dev/docs/player/buffer-state");
      }
      const { unblock } = addBlock({
        id: String(Math.random())
      });
      return { unblock };
    }
  }), [addBlock]);
};
// src/buffer-until-first-frame.ts
// Heuristic Safari/WebKit detection (excludes Chrome and Android WebViews).
var isSafariWebkit = () => {
  const isSafari2 = /^((?!chrome|android).)*safari/i.test(window.navigator.userAgent);
  return isSafari2;
};
// After a seek, holds the player in the buffer state until the video tag
// reports its first decoded frame via requestVideoFrameCallback(), so the
// player does not advance over black frames.
var useBufferUntilFirstFrame = ({
  mediaRef,
  mediaType,
  onVariableFpsVideoDetected,
  pauseWhenBuffering,
  logLevel,
  mountTime
}) => {
  const bufferingRef = useRef11(false);
  const { delayPlayback } = useBufferState();
  const bufferUntilFirstFrame = useCallback9((requestedTime) => {
    if (mediaType !== "video") {
      return;
    }
    if (!pauseWhenBuffering) {
      return;
    }
    const current = mediaRef.current;
    if (!current) {
      return;
    }
    if (current.readyState >= current.HAVE_FUTURE_DATA && !isSafariWebkit()) {
      playbackLogging({
        logLevel,
        message: `Not using buffer until first frame, because readyState is ${current.readyState} and is not Safari or Desktop Chrome`,
        mountTime,
        tag: "buffer"
      });
      return;
    }
    if (!current.requestVideoFrameCallback) {
      playbackLogging({
        logLevel,
        message: `Not using buffer until first frame, because requestVideoFrameCallback is not supported`,
        mountTime,
        tag: "buffer"
      });
      return;
    }
    bufferingRef.current = true;
    playbackLogging({
      logLevel,
      message: `Buffering ${mediaRef.current?.src} until the first frame is received`,
      mountTime,
      tag: "buffer"
    });
    const playback = delayPlayback();
    const unblock = () => {
      playback.unblock();
      // FIX: remove the listener that was actually registered
      // (onEndedOrPauseOrCanPlay) for all three events. Previously this
      // passed `unblock` itself and omitted "canplay", so removal never
      // matched and stale once-listeners could call playback.unblock() again.
      current.removeEventListener("ended", onEndedOrPauseOrCanPlay);
      current.removeEventListener("pause", onEndedOrPauseOrCanPlay);
      current.removeEventListener("canplay", onEndedOrPauseOrCanPlay);
      bufferingRef.current = false;
    };
    const onEndedOrPauseOrCanPlay = () => {
      unblock();
    };
    current.requestVideoFrameCallback((_, info2) => {
      // A large gap between the presented frame and the requested time is a
      // sign of a variable-FPS video.
      const differenceFromRequested = Math.abs(info2.mediaTime - requestedTime);
      if (differenceFromRequested > 0.5) {
        onVariableFpsVideoDetected();
      }
      unblock();
    });
    // Bail out of the buffer state if the media ends, pauses or becomes
    // playable before a frame arrives.
    current.addEventListener("ended", onEndedOrPauseOrCanPlay, { once: true });
    current.addEventListener("pause", onEndedOrPauseOrCanPlay, { once: true });
    current.addEventListener("canplay", onEndedOrPauseOrCanPlay, {
      once: true
    });
  }, [
    delayPlayback,
    logLevel,
    mediaRef,
    mediaType,
    mountTime,
    onVariableFpsVideoDetected,
    pauseWhenBuffering
  ]);
  return useMemo20(() => {
    return {
      isBuffering: () => bufferingRef.current,
      bufferUntilFirstFrame
    };
  }, [bufferUntilFirstFrame]);
};

// src/media-tag-current-time-timestamp.ts
import React16 from "react";
// Tracks the media tag's currentTime together with the performance.now()
// timestamp of its last change, without causing re-renders.
var useCurrentTimeOfMediaTagWithUpdateTimeStamp = (mediaRef) => {
  const lastUpdate = React16.useRef({
    time: mediaRef.current?.currentTime ?? 0,
    lastUpdate: performance.now()
  });
  const nowCurrentTime = mediaRef.current?.currentTime ?? null;
  if (nowCurrentTime !== null) {
    if (lastUpdate.current.time !== nowCurrentTime) {
      lastUpdate.current.time = nowCurrentTime;
      lastUpdate.current.lastUpdate = performance.now();
    }
  }
  return lastUpdate;
};

// src/seek.ts
// Seeks the media tag and returns the time that was actually set.
// iOS Safari is seeked with reduced precision to avoid seek thrashing.
var seek = ({
  mediaRef,
  time,
  logLevel,
  why,
  mountTime
}) => {
  const timeToSet = isIosSafari() ? Number(time.toFixed(1)) : time;
  playbackLogging({
    logLevel,
    tag: "seek",
    message: `Seeking from ${mediaRef.currentTime} to ${timeToSet}. src= ${mediaRef.src} Reason: ${why}`,
    mountTime
  });
  mediaRef.currentTime = timeToSet;
  return timeToSet;
};
// src/use-media-buffering.ts
import { useEffect as useEffect6, useState as useState12 } from "react";
// Mirrors the media element's readiness into the player's buffer state:
// blocks playback while readyState is too low or a "waiting" event fires,
// and unblocks on "canplay"/"error". Returns whether this tag is buffering.
var useMediaBuffering = ({
  element,
  shouldBuffer,
  isPremounting,
  isPostmounting,
  logLevel,
  mountTime,
  src
}) => {
  const buffer = useBufferState();
  const [isBuffering, setIsBuffering] = useState12(false);
  useEffect6(() => {
    let cleanupFns = [];
    const { current } = element;
    if (!current) {
      return;
    }
    if (!shouldBuffer) {
      return;
    }
    // Pre/postmounted elements never block playback; just kick off loading
    // early so they are ready when they become active.
    if (isPremounting || isPostmounting) {
      // NOTE(review): the inner (isPremounting || isPostmounting) check is
      // redundant — it is already guaranteed by the outer branch.
      if ((isPremounting || isPostmounting) && current.readyState < current.HAVE_FUTURE_DATA) {
        // Firefox is excluded — presumably .load() misbehaves there; confirm.
        if (!navigator.userAgent.includes("Firefox/")) {
          playbackLogging({
            logLevel,
            message: `Calling .load() on ${current.src} because readyState is ${current.readyState} and it is not Firefox. Element is premounted ${current.playbackRate}`,
            tag: "load",
            mountTime
          });
          // .load() resets playbackRate; restore it afterwards.
          const previousPlaybackRate = current.playbackRate;
          current.load();
          current.playbackRate = previousPlaybackRate;
        }
      }
      return;
    }
    // Runs all pending cleanup callbacks (listener removal + unblock) and
    // clears the buffering flag; logs only if something actually changed.
    const cleanup = (reason) => {
      let didDoSomething = false;
      cleanupFns.forEach((fn) => {
        fn(reason);
        didDoSomething = true;
      });
      cleanupFns = [];
      setIsBuffering((previous) => {
        if (previous) {
          didDoSomething = true;
        }
        return false;
      });
      if (didDoSomething) {
        playbackLogging({
          logLevel,
          message: `Unmarking as buffering: ${current.src}. Reason: ${reason}`,
          tag: "buffer",
          mountTime
        });
      }
    };
    // Enters the buffer state and arms one-shot listeners that will exit it
    // and re-run init() once the element can play (or errors).
    const blockMedia = (reason) => {
      setIsBuffering(true);
      playbackLogging({
        logLevel,
        message: `Marking as buffering: ${current.src}. Reason: ${reason}`,
        tag: "buffer",
        mountTime
      });
      const { unblock } = buffer.delayPlayback();
      const onCanPlay = () => {
        cleanup('"canplay" was fired');
        init();
      };
      const onError = () => {
        cleanup('"error" event was occurred');
        init();
      };
      current.addEventListener("canplay", onCanPlay, {
        once: true
      });
      cleanupFns.push(() => {
        current.removeEventListener("canplay", onCanPlay);
      });
      current.addEventListener("error", onError, {
        once: true
      });
      cleanupFns.push(() => {
        current.removeEventListener("error", onError);
      });
      cleanupFns.push((cleanupReason) => {
        playbackLogging({
          logLevel,
          message: `Unblocking ${current.src} from buffer. Reason: ${cleanupReason}`,
          tag: "buffer",
          mountTime
        });
        unblock();
      });
    };
    // Decides the initial mode: block immediately if not enough data is
    // buffered, otherwise wait for a "waiting" event.
    const init = () => {
      if (current.readyState < current.HAVE_FUTURE_DATA) {
        blockMedia(`readyState is ${current.readyState}, which is less than HAVE_FUTURE_DATA`);
        if (!navigator.userAgent.includes("Firefox/")) {
          playbackLogging({
            logLevel,
            message: `Calling .load() on ${src} because readyState is ${current.readyState} and it is not Firefox. ${current.playbackRate}`,
            tag: "load",
            mountTime
          });
          const previousPlaybackRate = current.playbackRate;
          current.load();
          current.playbackRate = previousPlaybackRate;
        }
      } else {
        const onWaiting = () => {
          blockMedia('"waiting" event was fired');
        };
        current.addEventListener("waiting", onWaiting);
        cleanupFns.push(() => {
          current.removeEventListener("waiting", onWaiting);
        });
      }
    };
    init();
    return () => {
      cleanup("element was unmounted or prop changed");
    };
  }, [
    buffer,
    src,
    element,
    isPremounting,
    isPostmounting,
    logLevel,
    shouldBuffer,
    mountTime
  ]);
  return isBuffering;
};

// src/use-request-video-callback-time.ts
import { useEffect as useEffect7, useRef as useRef12 } from "react";
// Continuously tracks the presentation time reported by
// requestVideoFrameCallback (more accurate than currentTime) and flags
// variable-FPS videos when the reported time jumps unexpectedly.
var useRequestVideoCallbackTime = ({
  mediaRef,
  mediaType,
  lastSeek,
  onVariableFpsVideoDetected
}) => {
  const currentTime = useRef12(null);
  useEffect7(() => {
    const { current } = mediaRef;
    if (current) {
      currentTime.current = {
        time: current.currentTime,
        lastUpdate: performance.now()
      };
    } else {
      currentTime.current = null;
      return;
    }
    if (mediaType !== "video") {
      currentTime.current = null;
      return;
    }
    const videoTag = current;
    if (!videoTag.requestVideoFrameCallback) {
      return;
    }
    let cancel = () => {
      return;
    };
    // Re-arms itself after every frame; `cancel` always cancels the most
    // recently scheduled callback.
    const request = () => {
      if (!videoTag) {
        return;
      }
      const cb = videoTag.requestVideoFrameCallback((_, info2) => {
        if (currentTime.current !== null) {
          const difference = Math.abs(currentTime.current.time - info2.mediaTime);
          const differenceToLastSeek = Math.abs(lastSeek.current === null ? Infinity : info2.mediaTime - lastSeek.current);
          // A forward jump > 0.5s that is not explained by a recent seek is
          // treated as evidence of a variable-FPS video.
          if (difference > 0.5 && differenceToLastSeek > 0.5 && info2.mediaTime > currentTime.current.time) {
            onVariableFpsVideoDetected();
          }
        }
        currentTime.current = {
          time: info2.mediaTime,
          lastUpdate: performance.now()
        };
        request();
      });
      cancel = () => {
        videoTag.cancelVideoFrameCallback(cb);
        cancel = () => {
          return;
        };
      };
    };
    request();
    return () => {
      cancel();
    };
  }, [lastSeek, mediaRef, mediaType, onVariableFpsVideoDetected]);
  return currentTime;
};
// src/interpolate.ts
/**
 * Maps `input` from a two-point input segment onto the corresponding output
 * segment. Out-of-range inputs are handled per the extrapolation mode
 * ("extend" = linear extrapolation, "clamp", "identity", "wrap"), and the
 * easing function is applied to the normalized progress.
 */
function interpolateFunction(input, inputRange, outputRange, options) {
  const { extrapolateLeft, extrapolateRight, easing } = options;
  const [inputMin, inputMax] = inputRange;
  const [outputMin, outputMax] = outputRange;
  let value = input;
  if (value < inputMin) {
    if (extrapolateLeft === "identity") {
      return value;
    }
    if (extrapolateLeft === "clamp") {
      value = inputMin;
    } else if (extrapolateLeft === "wrap") {
      const span = inputMax - inputMin;
      value = ((value - inputMin) % span + span) % span + inputMin;
    }
    // "extend": keep the value and extrapolate linearly below the segment.
  }
  if (value > inputMax) {
    if (extrapolateRight === "identity") {
      return value;
    }
    if (extrapolateRight === "clamp") {
      value = inputMax;
    } else if (extrapolateRight === "wrap") {
      const span = inputMax - inputMin;
      value = ((value - inputMin) % span + span) % span + inputMin;
    }
    // "extend": linear extrapolation above the segment.
  }
  // Degenerate output segment: every input maps to the same value.
  if (outputMin === outputMax) {
    return outputMin;
  }
  const progress = easing((value - inputMin) / (inputMax - inputMin));
  return progress * (outputMax - outputMin) + outputMin;
}
/**
 * Returns the index of the segment of `inputRange` that `input` falls into.
 * Inputs beyond the right edge map to the last segment (enables "extend").
 */
function findRange(input, inputRange) {
  let i = 1;
  while (i < inputRange.length - 1 && inputRange[i] < input) {
    i++;
  }
  return i - 1;
}
/** Throws unless `arr` is strictly increasing (NaN entries also throw). */
function checkValidInputRange(arr) {
  for (let i = 1; i < arr.length; ++i) {
    const strictlyAscending = arr[i] > arr[i - 1];
    if (!strictlyAscending) {
      throw new Error(`inputRange must be strictly monotonically increasing but got [${arr.join(",")}]`);
    }
  }
}
/** Validates that a range has at least 2 entries, all finite numbers. */
function checkInfiniteRange(name, arr) {
  if (arr.length < 2) {
    throw new Error(name + " must have at least 2 elements");
  }
  for (let i = 0; i < arr.length; i++) {
    const element = arr[i];
    if (typeof element !== "number") {
      throw new Error(`${name} must contain only numbers`);
    }
    if (!Number.isFinite(element)) {
      throw new Error(`${name} must contain only finite numbers, but got [${arr.join(",")}]`);
    }
  }
}
/**
 * Piecewise-linear interpolation of `input` from `inputRange` to
 * `outputRange` (same semantics as React Native's interpolate).
 *
 * @param {number} input - Value to interpolate.
 * @param {number[]} inputRange - Strictly increasing, finite breakpoints.
 * @param {number[]} outputRange - Output values, same length as inputRange.
 * @param {{easing?: (n: number) => number, extrapolateLeft?: string, extrapolateRight?: string}} [options]
 * @returns {number} The interpolated value.
 * @throws {Error} On undefined arguments, mismatched lengths, non-finite or
 *   non-monotonic ranges.
 * @throws {TypeError} When `input` is not a number.
 */
function interpolate(input, inputRange, outputRange, options) {
  if (typeof input === "undefined") {
    throw new Error("input can not be undefined");
  }
  if (typeof inputRange === "undefined") {
    throw new Error("inputRange can not be undefined");
  }
  if (typeof outputRange === "undefined") {
    throw new Error("outputRange can not be undefined");
  }
  if (inputRange.length !== outputRange.length) {
    throw new Error("inputRange (" + inputRange.length + ") and outputRange (" + outputRange.length + ") must have the same length");
  }
  checkInfiniteRange("inputRange", inputRange);
  checkInfiniteRange("outputRange", outputRange);
  checkValidInputRange(inputRange);
  const easing = options?.easing ?? ((num) => num);
  const extrapolateLeft = options?.extrapolateLeft !== undefined ? options.extrapolateLeft : "extend";
  const extrapolateRight = options?.extrapolateRight !== undefined ? options.extrapolateRight : "extend";
  if (typeof input !== "number") {
    throw new TypeError("Cannot interpolate an input which is not a number");
  }
  const segment = findRange(input, inputRange);
  const inputSegment = [inputRange[segment], inputRange[segment + 1]];
  const outputSegment = [outputRange[segment], outputRange[segment + 1]];
  return interpolateFunction(input, inputSegment, outputSegment, {
    easing,
    extrapolateLeft,
    extrapolateRight
  });
}

// src/video/get-current-time.ts
/**
 * Expected media frame for a given timeline frame: frame -1 maps to media
 * frame -1, `startFrom` maps to itself, and every frame past `startFrom`
 * advances by `playbackRate` (extrapolated linearly beyond the breakpoints).
 */
var getExpectedMediaFrameUncorrected = ({
  frame,
  playbackRate,
  startFrom
}) => {
  return interpolate(frame, [-1, startFrom, startFrom + 1], [-1, startFrom, startFrom + playbackRate]);
};
/** Media time in seconds that the tag should be at for the given frame. */
var getMediaTime = ({
  fps,
  frame,
  playbackRate,
  startFrom
}) => {
  const expectedFrame = getExpectedMediaFrameUncorrected({
    frame,
    playbackRate,
    startFrom
  });
  const msPerFrame = 1000 / fps;
  return expectedFrame * msPerFrame / 1000;
};
// src/warn-about-non-seekable-media.ts
// Keyed by src so each media file warns at most once.
var alreadyWarned = {};
// Warns (or throws) when a media element reports a single zero-length
// seekable range, which means it cannot be seeked.
// NOTE(review): elements with more than one seekable range are silently
// skipped, and "prventing" in the message below is a typo in the shipped
// string (left unchanged here since it is runtime output).
var warnAboutNonSeekableMedia = (ref, type) => {
  if (ref === null) {
    return;
  }
  if (ref.seekable.length === 0) {
    return;
  }
  if (ref.seekable.length > 1) {
    return;
  }
  if (alreadyWarned[ref.src]) {
    return;
  }
  const range = { start: ref.seekable.start(0), end: ref.seekable.end(0) };
  if (range.start === 0 && range.end === 0) {
    const msg = [
      `The media ${ref.src} cannot be seeked. This could be one of few reasons:`,
      "1) The media resource was replaced while the video is playing but it was not loaded yet.",
      "2) The media does not support seeking.",
      "3) The media was loaded with security headers prventing it from being included.",
      "Please see https://remotion.dev/docs/non-seekable-media for assistance."
    ].join(`
`);
    if (type === "console-error") {
      console.error(msg);
    } else if (type === "console-warning") {
      console.warn(`The media ${ref.src} does not support seeking. The video will render fine, but may not play correctly in the Remotion Studio and in the <Player>. See https://remotion.dev/docs/non-seekable-media for an explanation.`);
    } else {
      throw new Error(msg);
    }
    alreadyWarned[ref.src] = true;
  }
};

// src/use-media-playback.ts
// Core playback-sync hook: keeps an <audio>/<video> tag's currentTime,
// playbackRate and play/pause state in lockstep with the Remotion timeline,
// seeking when the drift ("time shift") exceeds a tolerance and entering the
// buffer state while waiting for frames.
var useMediaPlayback = ({
  mediaRef,
  src,
  mediaType,
  playbackRate: localPlaybackRate,
  onlyWarnForMediaSeekingError,
  acceptableTimeshift,
  pauseWhenBuffering,
  isPremounting,
  isPostmounting,
  onAutoPlayError
}) => {
  const { playbackRate: globalPlaybackRate } = useTimelineContext();
  const frame = useCurrentFrame();
  const absoluteFrame = useTimelinePosition();
  const [playing] = usePlayingState();
  const buffering = useContext22(BufferingContextReact);
  const { fps } = useVideoConfig();
  const mediaStartsAt = useMediaStartsAt();
  // Last seek performed specifically due to excessive drift (prevents
  // re-seeking to the same target in a loop).
  const lastSeekDueToShift = useRef13(null);
  const lastSeek = useRef13(null);
  const logLevel = useLogLevel();
  const mountTime = useMountTime();
  if (!buffering) {
    throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
  }
  // Per-src memo of videos identified as variable-FPS (buffering-on-seek is
  // disabled for those).
  const isVariableFpsVideoMap = useRef13({});
  const onVariableFpsVideoDetected = useCallback10(() => {
    if (!src) {
      return;
    }
    if (isVariableFpsVideoMap.current[src]) {
      return;
    }
    Log.verbose({ logLevel, tag: null }, `Detected ${src} as a variable FPS video. Disabling buffering while seeking.`);
    isVariableFpsVideoMap.current[src] = true;
  }, [logLevel, src]);
  const rvcCurrentTime = useRequestVideoCallbackTime({
    mediaRef,
    mediaType,
    lastSeek,
    onVariableFpsVideoDetected
  });
  const mediaTagCurrentTime = useCurrentTimeOfMediaTagWithUpdateTimeStamp(mediaRef);
  const desiredUnclampedTime = getMediaTime({
    frame,
    playbackRate: localPlaybackRate,
    startFrom: -mediaStartsAt,
    fps
  });
  const isMediaTagBuffering = useMediaBuffering({
    element: mediaRef,
    shouldBuffer: pauseWhenBuffering,
    isPremounting,
    isPostmounting,
    logLevel,
    mountTime,
    src: src ?? null
  });
  const { bufferUntilFirstFrame, isBuffering } = useBufferUntilFirstFrame({
    mediaRef,
    mediaType,
    onVariableFpsVideoDetected,
    pauseWhenBuffering,
    logLevel,
    mountTime
  });
  const playbackRate = localPlaybackRate * globalPlaybackRate;
  // Drift tolerance before a corrective seek, capped at the media duration
  // (short clips would otherwise never be considered in sync).
  const acceptableTimeShiftButLessThanDuration = (() => {
    const DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_NORMAL_PLAYBACK = 0.45;
    const DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_AMPLIFICATION = DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_NORMAL_PLAYBACK + 0.2;
    const defaultAcceptableTimeshift = DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_AMPLIFICATION;
    if (mediaRef.current?.duration) {
      return Math.min(mediaRef.current.duration, acceptableTimeshift ?? defaultAcceptableTimeshift);
    }
    return acceptableTimeshift ?? defaultAcceptableTimeshift;
  })();
  const isPlayerBuffering = useIsPlayerBuffering(buffering);
  // Pause the tag whenever the player stops, or when something *else* is
  // buffering (the tag itself being the buffer source keeps playing).
  useEffect8(() => {
    if (mediaRef.current?.paused) {
      return;
    }
    if (!playing) {
      playbackLogging({
        logLevel,
        tag: "pause",
        message: `Pausing ${mediaRef.current?.src} because ${isPremounting ? "media is premounting" : isPostmounting ? "media is postmounting" : "Player is not playing"}`,
        mountTime
      });
      mediaRef.current?.pause();
      return;
    }
    const isMediaTagBufferingOrStalled = isMediaTagBuffering || isBuffering();
    const playerBufferingNotStateButLive = buffering.buffering.current;
    if (playerBufferingNotStateButLive && !isMediaTagBufferingOrStalled) {
      playbackLogging({
        logLevel,
        tag: "pause",
        message: `Pausing ${mediaRef.current?.src} because player is buffering but media tag is not`,
        mountTime
      });
      mediaRef.current?.pause();
    }
  }, [
    isBuffering,
    isMediaTagBuffering,
    buffering,
    isPlayerBuffering,
    isPremounting,
    logLevel,
    mediaRef,
    mediaType,
    mountTime,
    playing,
    isPostmounting
  ]);
  const env = useRemotionEnvironment();
  // Keep the tag's playbackRate in sync (negative rates clamp to 0).
  useLayoutEffect7(() => {
    const playbackRateToSet = Math.max(0, playbackRate);
    if (mediaRef.current && mediaRef.current.playbackRate !== playbackRateToSet) {
      mediaRef.current.playbackRate = playbackRateToSet;
    }
  }, [mediaRef, playbackRate]);
  // Main sync effect: runs every frame and corrects drift / play state.
  useEffect8(() => {
    const tagName = mediaType === "audio" ? "<Html5Audio>" : "<Html5Video>";
    if (!mediaRef.current) {
      throw new Error(`No ${mediaType} ref found`);
    }
    if (!src) {
      throw new Error(`No 'src' attribute was passed to the ${tagName} element.`);
    }
    const { duration } = mediaRef.current;
    // Clamp the target time to the media duration when it is known.
    const shouldBeTime = !Number.isNaN(duration) && Number.isFinite(duration) ? Math.min(duration, desiredUnclampedTime) : desiredUnclampedTime;
    const mediaTagTime = mediaTagCurrentTime.current.time;
    const rvcTime = rvcCurrentTime.current?.time ?? null;
    const isVariableFpsVideo = isVariableFpsVideoMap.current[src];
    const timeShiftMediaTag = Math.abs(shouldBeTime - mediaTagTime);
    const timeShiftRvcTag = rvcTime ? Math.abs(shouldBeTime - rvcTime) : null;
    // Prefer the requestVideoFrameCallback-based drift when it is fresher
    // than the media tag's currentTime, except for variable-FPS videos.
    const mostRecentTimeshift = rvcCurrentTime.current?.lastUpdate && rvcCurrentTime.current.time > mediaTagCurrentTime.current.lastUpdate ? timeShiftRvcTag : timeShiftMediaTag;
    const timeShift = timeShiftRvcTag && !isVariableFpsVideo ? mostRecentTimeshift : timeShiftMediaTag;
    if (timeShift > acceptableTimeShiftButLessThanDuration && lastSeekDueToShift.current !== shouldBeTime) {
      lastSeek.current = seek({
        mediaRef: mediaRef.current,
        time: shouldBeTime,
        logLevel,
        why: `because time shift is too big. shouldBeTime = ${shouldBeTime}, isTime = ${mediaTagTime}, requestVideoCallbackTime = ${rvcTime}, timeShift = ${timeShift}${isVariableFpsVideo ? ", isVariableFpsVideo = true" : ""}, isPremounting = ${isPremounting}, isPostmounting = ${isPostmounting}, pauseWhenBuffering = ${pauseWhenBuffering}`,
        mountTime
      });
      lastSeekDueToShift.current = lastSeek.current;
      if (playing) {
        if (playbackRate > 0) {
          bufferUntilFirstFrame(shouldBeTime);
        }
        if (mediaRef.current.paused) {
          playAndHandleNotAllowedError({
            mediaRef,
            mediaType,
            onAutoPlayError,
            logLevel,
            mountTime,
            reason: "player is playing but media tag is paused, and just seeked",
            isPlayer: env.isPlayer
          });
        }
      }
      // NOTE(review): inside this branch onlyWarnForMediaSeekingError is
      // always falsy, so the ternary below always yields "console-error" —
      // the "console-warning" arm is dead code.
      if (!onlyWarnForMediaSeekingError) {
        warnAboutNonSeekableMedia(mediaRef.current, onlyWarnForMediaSeekingError ? "console-warning" : "console-error");
      }
      return;
    }
    // A looser threshold while playing avoids constant micro-seeks.
    const seekThreshold = playing ? 0.15 : 0.01;
    const makesSenseToSeek = Math.abs(mediaRef.current.currentTime - shouldBeTime) > seekThreshold;
    const isMediaTagBufferingOrStalled = isMediaTagBuffering || isBuffering();
    const isSomethingElseBuffering = buffering.buffering.current && !isMediaTagBufferingOrStalled;
    if (!playing || isSomethingElseBuffering) {
      if (makesSenseToSeek) {
        lastSeek.current = seek({
          mediaRef: mediaRef.current,
          time: shouldBeTime,
          logLevel,
          why: `not playing or something else is buffering. time offset is over seek threshold (${seekThreshold})`,
          mountTime
        });
      }
      return;
    }
    if (!playing || buffering.buffering.current) {
      return;
    }
    // Player is playing and nothing is buffering: (re)start the tag if it
    // is paused or we are at the very first frame.
    const pausedCondition = mediaRef.current.paused && !mediaRef.current.ended;
    const firstFrameCondition = absoluteFrame === 0;
    if (pausedCondition || firstFrameCondition) {
      const reason = pausedCondition ? "media tag is paused" : "absolute frame is 0";
      if (makesSenseToSeek) {
        lastSeek.current = seek({
          mediaRef: mediaRef.current,
          time: shouldBeTime,
          logLevel,
          why: `is over timeshift threshold (threshold = ${seekThreshold}) and ${reason}`,
          mountTime
        });
      }
      playAndHandleNotAllowedError({
        mediaRef,
        mediaType,
        onAutoPlayError,
        logLevel,
        mountTime,
        reason: `player is playing and ${reason}`,
        isPlayer: env.isPlayer
      });
      if (!isVariableFpsVideo && playbackRate > 0) {
        bufferUntilFirstFrame(shouldBeTime);
      }
    }
  }, [
    absoluteFrame,
    acceptableTimeShiftButLessThanDuration,
    bufferUntilFirstFrame,
    buffering.buffering,
    rvcCurrentTime,
    logLevel,
    desiredUnclampedTime,
    isBuffering,
    isMediaTagBuffering,
    mediaRef,
    mediaType,
    onlyWarnForMediaSeekingError,
    playbackRate,
    playing,
    src,
    onAutoPlayError,
    isPremounting,
    isPostmounting,
    pauseWhenBuffering,
    mountTime,
    mediaTagCurrentTime,
    env.isPlayer
  ]);
};
// src/use-media-tag.ts
import { useEffect as useEffect9 } from "react";
// Registers this media element in the timeline's shared audioAndVideoTags
// list so the player can imperatively trigger play() on all tags (needed to
// satisfy browser autoplay policies from a user gesture).
var useMediaTag = ({
  mediaRef,
  id,
  mediaType,
  onAutoPlayError,
  isPremounting,
  isPostmounting
}) => {
  const { audioAndVideoTags, imperativePlaying } = useTimelineContext();
  const logLevel = useLogLevel();
  const mountTime = useMountTime();
  const env = useRemotionEnvironment();
  useEffect9(() => {
    const tag = {
      id,
      play: (reason) => {
        // Only play while the player is actually in imperative-playing mode
        // and the element is active (not pre-/post-mounted).
        if (!imperativePlaying.current) {
          return;
        }
        if (isPremounting || isPostmounting) {
          return;
        }
        return playAndHandleNotAllowedError({
          mediaRef,
          mediaType,
          onAutoPlayError,
          logLevel,
          mountTime,
          reason,
          isPlayer: env.isPlayer
        });
      }
    };
    audioAndVideoTags.current.push(tag);
    return () => {
      audioAndVideoTags.current = audioAndVideoTags.current.filter((a) => a.id !== id);
    };
  }, [
    audioAndVideoTags,
    id,
    mediaRef,
    mediaType,
    onAutoPlayError,
    imperativePlaying,
    isPremounting,
    isPostmounting,
    logLevel,
    mountTime,
    env.isPlayer
  ]);
};

// src/volume-position-state.ts
import { createContext as createContext18, useContext as useContext23, useMemo as useMemo21 } from "react";
// Read side of the global media volume/mute state.
var MediaVolumeContext = createContext18({
  mediaMuted: false,
  mediaVolume: 1
});
// Write side; the defaults throw so misuse outside a provider is caught.
var SetMediaVolumeContext = createContext18({
  setMediaMuted: () => {
    throw new Error("default");
  },
  setMediaVolume: () => {
    throw new Error("default");
  }
});
// [value, setter] tuple for the global media volume.
var useMediaVolumeState = () => {
  const { mediaVolume } = useContext23(MediaVolumeContext);
  const { setMediaVolume } = useContext23(SetMediaVolumeContext);
  return useMemo21(() => {
    return [mediaVolume, setMediaVolume];
  }, [mediaVolume, setMediaVolume]);
};
// [value, setter] tuple for the global mute flag.
var useMediaMutedState = () => {
  const { mediaMuted } = useContext23(MediaVolumeContext);
  const { setMediaMuted } = useContext23(SetMediaVolumeContext);
  return useMemo21(() => {
    return [mediaMuted, setMediaMuted];
  }, [mediaMuted, setMediaMuted]);
};

// src/volume-safeguard.ts
// Guards against passing percentage-style volumes (0-100) where Remotion
// expects a 0-1 scale.
var warnAboutTooHighVolume = (volume) => {
  if (volume >= 100) {
    throw new Error(`Volume was set to ${volume}, but regular volume is 1, not 100. Did you forget to divide by 100? Set a volume of less than 100 to dismiss this error.`);
  }
};
// src/audio/AudioForPreview.tsx
import { jsx as jsx17 } from "react/jsx-runtime";
// Preview/Studio implementation of <Html5Audio>: wires a shared <audio>
// element into the timeline, playback sync, volume and duration reporting.
var AudioForDevelopmentForwardRefFunction = (props, ref) => {
  // The pre-mount strategy is captured on first render and must not change
  // afterwards, because it decides whether this component renders a tag.
  const [initialShouldPreMountAudioElements] = useState13(props.shouldPreMountAudioTags);
  if (props.shouldPreMountAudioTags !== initialShouldPreMountAudioElements) {
    throw new Error("Cannot change the behavior for pre-mounting audio tags dynamically.");
  }
  const logLevel = useLogLevel();
  // Split Remotion-specific props from the native <audio> attributes that
  // are forwarded verbatim.
  const {
    volume,
    muted,
    playbackRate,
    shouldPreMountAudioTags,
    src,
    onDuration,
    acceptableTimeShiftInSeconds,
    _remotionInternalNeedsDurationCalculation,
    _remotionInternalNativeLoopPassed,
    _remotionInternalStack,
    allowAmplificationDuringRender,
    name,
    pauseWhenBuffering,
    showInTimeline,
    loopVolumeCurveBehavior,
    stack,
    crossOrigin,
    delayRenderRetries,
    delayRenderTimeoutInMilliseconds,
    toneFrequency,
    useWebAudioApi,
    onError,
    onNativeError,
    audioStreamIndex,
    ...nativeProps
  } = props;
  // Compile-time typecheck artifact from the TS build; never throws.
  const _propsValid = true;
  if (!_propsValid) {
    throw new Error("typecheck error");
  }
  const [mediaVolume] = useMediaVolumeState();
  const [mediaMuted] = useMediaMutedState();
  const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
  const { hidden } = useContext24(SequenceVisibilityToggleContext);
  if (!src) {
    throw new TypeError("No 'src' was passed to <Html5Audio>.");
  }
  const preloadedSrc = usePreload(src);
  const sequenceContext = useContext24(SequenceContext);
  // Random, stable id used to key this tag in the timeline.
  const [timelineId] = useState13(() => String(Math.random()));
  const isSequenceHidden = hidden[timelineId] ?? false;
  const userPreferredVolume = evaluateVolume({
    frame: volumePropFrame,
    volume,
    mediaVolume
  });
  warnAboutTooHighVolume(userPreferredVolume);
  const crossOriginValue = getCrossOriginValue({
    crossOrigin,
    requestsVideoFrame: false,
    isClientSideRendering: false
  });
  // Props handed to the shared audio element; mute if any mute source
  // applies or the effective volume is zero.
  const propsToPass = useMemo22(() => {
    return {
      muted: muted || mediaMuted || isSequenceHidden || userPreferredVolume <= 0,
      src: preloadedSrc,
      loop: _remotionInternalNativeLoopPassed,
      crossOrigin: crossOriginValue,
      ...nativeProps
    };
  }, [
    _remotionInternalNativeLoopPassed,
    isSequenceHidden,
    mediaMuted,
    muted,
    nativeProps,
    preloadedSrc,
    userPreferredVolume,
    crossOriginValue
  ]);
  // Deterministic id for the shared-audio pool, derived from src and the
  // sequence position so equivalent tags can be reused.
  const id = useMemo22(() => `audio-${random(src ?? "")}-${sequenceContext?.relativeFrom}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.durationInFrames}-muted:${props.muted}-loop:${props.loop}`, [
    src,
    sequenceContext?.relativeFrom,
    sequenceContext?.cumulatedFrom,
    sequenceContext?.durationInFrames,
    props.muted,
    props.loop
  ]);
  const {
    el: audioRef,
    mediaElementSourceNode,
    cleanupOnMediaTagUnmount
  } = useSharedAudio({
    aud: propsToPass,
    audioId: id,
    premounting: Boolean(sequenceContext?.premounting),
    postmounting: Boolean(sequenceContext?.postmounting)
  });
  useMediaInTimeline({
    volume,
    mediaVolume,
    src,
    mediaType: "audio",
    playbackRate: playbackRate ?? 1,
    displayName: name ?? null,
    id: timelineId,
    stack: _remotionInternalStack,
    showInTimeline,
    premountDisplay: sequenceContext?.premountDisplay ?? null,
    postmountDisplay: sequenceContext?.postmountDisplay ?? null,
    loopDisplay: undefined
  });
  useMediaPlayback({
    mediaRef: audioRef,
    src,
    mediaType: "audio",
    playbackRate: playbackRate ?? 1,
    onlyWarnForMediaSeekingError: false,
    acceptableTimeshift: acceptableTimeShiftInSeconds ?? null,
    isPremounting: Boolean(sequenceContext?.premounting),
    isPostmounting: Boolean(sequenceContext?.postmounting),
    pauseWhenBuffering,
    onAutoPlayError: null
  });
  useMediaTag({
    id: timelineId,
    isPostmounting: Boolean(sequenceContext?.postmounting),
    isPremounting: Boolean(sequenceContext?.premounting),
    mediaRef: audioRef,
    mediaType: "audio",
    onAutoPlayError: null
  });
  useVolume({
    logLevel,
    mediaRef: audioRef,
    source: mediaElementSourceNode,
    volume: userPreferredVolume,
    shouldUseWebAudioApi: useWebAudioApi ?? false
  });
  // Prefer useInsertionEffect when available (React 18+); fall back to
  // useLayoutEffect on older React versions.
  const effectToUse = React17.useInsertionEffect ?? React17.useLayoutEffect;
  effectToUse(() => {
    return () => {
      // Deferred so the shared tag can be handed over before teardown.
      requestAnimationFrame(() => {
        cleanupOnMediaTagUnmount();
      });
    };
  }, [cleanupOnMediaTagUnmount]);
  useImperativeHandle5(ref, () => {
    return audioRef.current;
  }, [audioRef]);
  // Keep the latest onDuration callback without re-subscribing listeners.
  const currentOnDurationCallback = useRef14(onDuration);
  currentOnDurationCallback.current = onDuration;
  useEffect10(() => {
    const { current } = audioRef;
    if (!current) {
      return;
    }
    if (current.duration) {
      currentOnDurationCallback.current?.(current.src, current.duration);
      return;
    }
    const onLoadedMetadata = () => {
      currentOnDurationCallback.current?.(current.src, current.duration);
    };
    current.addEventListener("loadedmetadata", onLoadedMetadata);
    return () => {
      current.removeEventListener("loadedmetadata", onLoadedMetadata);
    };
  }, [audioRef, src]);
  // When tags are pre-mounted, the shared-audio pool owns the element and
  // nothing is rendered here.
  if (initialShouldPreMountAudioElements) {
    return null;
  }
  return /* @__PURE__ */ jsx17("audio", {
    ref: audioRef,
    preload: "metadata",
    crossOrigin: crossOriginValue,
    ...propsToPass
  });
};
var AudioForPreview = forwardRef4(AudioForDevelopmentForwardRefFunction);
as useRef15\n} from \"react\";\nimport { jsx as jsx18 } from \"react/jsx-runtime\";\nvar AudioForRenderingRefForwardingFunction = (props, ref) => {\n const audioRef = useRef15(null);\n const {\n volume: volumeProp,\n playbackRate,\n allowAmplificationDuringRender,\n onDuration,\n toneFrequency,\n _remotionInternalNeedsDurationCalculation,\n _remotionInternalNativeLoopPassed,\n acceptableTimeShiftInSeconds,\n name,\n onNativeError,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n loopVolumeCurveBehavior,\n pauseWhenBuffering,\n audioStreamIndex,\n ...nativeProps\n } = props;\n const absoluteFrame = useTimelinePosition();\n const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? \"repeat\");\n const frame = useCurrentFrame();\n const sequenceContext = useContext25(SequenceContext);\n const { registerRenderAsset, unregisterRenderAsset } = useContext25(RenderAssetManager);\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n const id = useMemo23(() => `audio-${random(props.src ?? \"\")}-${sequenceContext?.relativeFrom}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.durationInFrames}`, [\n props.src,\n sequenceContext?.relativeFrom,\n sequenceContext?.cumulatedFrom,\n sequenceContext?.durationInFrames\n ]);\n const volume = evaluateVolume({\n volume: volumeProp,\n frame: volumePropFrame,\n mediaVolume: 1\n });\n warnAboutTooHighVolume(volume);\n useImperativeHandle6(ref, () => {\n return audioRef.current;\n }, []);\n useEffect11(() => {\n if (!props.src) {\n throw new Error(\"No src passed\");\n }\n if (!window.remotion_audioEnabled) {\n return;\n }\n if (props.muted) {\n return;\n }\n if (volume <= 0) {\n return;\n }\n registerRenderAsset({\n type: \"audio\",\n src: getAbsoluteSrc(props.src),\n id,\n frame: absoluteFrame,\n volume,\n mediaFrame: frame,\n playbackRate: props.playbackRate ?? 1,\n toneFrequency: toneFrequency ?? 1,\n audioStartFrame: Math.max(0, -(sequenceContext?.relativeFrom ?? 
0)),\n audioStreamIndex: audioStreamIndex ?? 0\n });\n return () => unregisterRenderAsset(id);\n }, [\n props.muted,\n props.src,\n registerRenderAsset,\n absoluteFrame,\n id,\n unregisterRenderAsset,\n volume,\n volumePropFrame,\n frame,\n playbackRate,\n props.playbackRate,\n toneFrequency,\n sequenceContext?.relativeFrom,\n audioStreamIndex\n ]);\n const { src } = props;\n const needsToRenderAudioTag = ref || _remotionInternalNeedsDurationCalculation;\n useLayoutEffect8(() => {\n if (window.process?.env?.NODE_ENV === \"test\") {\n return;\n }\n if (!needsToRenderAudioTag) {\n return;\n }\n const newHandle = delayRender2(\"Loading <Html5Audio> duration with src=\" + src, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined\n });\n const { current } = audioRef;\n const didLoad = () => {\n if (current?.duration) {\n onDuration(current.src, current.duration);\n }\n continueRender2(newHandle);\n };\n if (current?.duration) {\n onDuration(current.src, current.duration);\n continueRender2(newHandle);\n } else {\n current?.addEventListener(\"loadedmetadata\", didLoad, { once: true });\n }\n return () => {\n current?.removeEventListener(\"loadedmetadata\", didLoad);\n continueRender2(newHandle);\n };\n }, [\n src,\n onDuration,\n needsToRenderAudioTag,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n continueRender2,\n delayRender2\n ]);\n if (!needsToRenderAudioTag) {\n return null;\n }\n return /* @__PURE__ */ jsx18(\"audio\", {\n ref: audioRef,\n ...nativeProps,\n onError: onNativeError\n });\n};\nvar AudioForRendering = forwardRef5(AudioForRenderingRefForwardingFunction);\n\n// src/audio/Audio.tsx\nimport { jsx as jsx19 } from \"react/jsx-runtime\";\nvar AudioRefForwardingFunction = (props, ref) => {\n const audioContext = useContext26(SharedAudioContext);\n const {\n startFrom,\n endAt,\n trimBefore,\n trimAfter,\n name,\n stack,\n pauseWhenBuffering,\n showInTimeline,\n onError: 
onRemotionError,\n ...otherProps\n } = props;\n const { loop, ...propsOtherThanLoop } = props;\n const { fps } = useVideoConfig();\n const environment = useRemotionEnvironment();\n if (environment.isClientSideRendering) {\n throw new Error(\"<Html5Audio> is not supported in @remotion/web-renderer. Use <Audio> from @remotion/media instead. See https://remotion.dev/docs/client-side-rendering/limitations\");\n }\n const { durations, setDurations } = useContext26(DurationsContext);\n if (typeof props.src !== \"string\") {\n throw new TypeError(`The \\`<Html5Audio>\\` tag requires a string for \\`src\\`, but got ${JSON.stringify(props.src)} instead.`);\n }\n const preloadedSrc = usePreload(props.src);\n const onError = useCallback11((e) => {\n console.log(e.currentTarget.error);\n const errMessage = `Could not play audio with src ${preloadedSrc}: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;\n if (loop) {\n if (onRemotionError) {\n onRemotionError(new Error(errMessage));\n return;\n }\n cancelRender(new Error(errMessage));\n } else {\n onRemotionError?.(new Error(errMessage));\n console.warn(errMessage);\n }\n }, [loop, onRemotionError, preloadedSrc]);\n const onDuration = useCallback11((src, durationInSeconds) => {\n setDurations({ type: \"got-duration\", durationInSeconds, src });\n }, [setDurations]);\n const durationFetched = durations[getAbsoluteSrc(preloadedSrc)] ?? 
durations[getAbsoluteSrc(props.src)];\n validateMediaTrimProps({ startFrom, endAt, trimBefore, trimAfter });\n const { trimBeforeValue, trimAfterValue } = resolveTrimProps({\n startFrom,\n endAt,\n trimBefore,\n trimAfter\n });\n if (loop && durationFetched !== undefined) {\n if (!Number.isFinite(durationFetched)) {\n return /* @__PURE__ */ jsx19(Html5Audio, {\n ...propsOtherThanLoop,\n ref,\n _remotionInternalNativeLoopPassed: true\n });\n }\n const duration = durationFetched * fps;\n return /* @__PURE__ */ jsx19(Loop, {\n layout: \"none\",\n durationInFrames: calculateMediaDuration({\n trimAfter: trimAfterValue,\n mediaDurationInFrames: duration,\n playbackRate: props.playbackRate ?? 1,\n trimBefore: trimBeforeValue\n }),\n children: /* @__PURE__ */ jsx19(Html5Audio, {\n ...propsOtherThanLoop,\n ref,\n _remotionInternalNativeLoopPassed: true\n })\n });\n }\n if (typeof trimBeforeValue !== \"undefined\" || typeof trimAfterValue !== \"undefined\") {\n return /* @__PURE__ */ jsx19(Sequence, {\n layout: \"none\",\n from: 0 - (trimBeforeValue ?? 0),\n showInTimeline: false,\n durationInFrames: trimAfterValue,\n name,\n children: /* @__PURE__ */ jsx19(Html5Audio, {\n _remotionInternalNeedsDurationCalculation: Boolean(loop),\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n ...otherProps,\n ref\n })\n });\n }\n validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, \"Html5Audio\");\n if (environment.isRendering) {\n return /* @__PURE__ */ jsx19(AudioForRendering, {\n onDuration,\n ...props,\n ref,\n onNativeError: onError,\n _remotionInternalNeedsDurationCalculation: Boolean(loop)\n });\n }\n return /* @__PURE__ */ jsx19(AudioForPreview, {\n _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false,\n _remotionInternalStack: stack ?? 
null,\n shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0,\n ...props,\n ref,\n onNativeError: onError,\n onDuration,\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n _remotionInternalNeedsDurationCalculation: Boolean(loop),\n showInTimeline: showInTimeline ?? true\n });\n};\nvar Html5Audio = forwardRef6(AudioRefForwardingFunction);\naddSequenceStackTraces(Html5Audio);\nvar Audio = Html5Audio;\n// src/Composition.tsx\nimport { Suspense, useContext as useContext28, useEffect as useEffect13 } from \"react\";\nimport { createPortal } from \"react-dom\";\n\n// src/Folder.tsx\nimport { createContext as createContext19, useContext as useContext27, useEffect as useEffect12, useMemo as useMemo24 } from \"react\";\n\n// src/validation/validate-folder-name.ts\nvar getRegex = () => /^([a-zA-Z0-9-\\u4E00-\\u9FFF])+$/g;\nvar isFolderNameValid = (name) => name.match(getRegex());\nvar validateFolderName = (name) => {\n if (name === undefined || name === null) {\n throw new TypeError(\"You must pass a name to a <Folder />.\");\n }\n if (typeof name !== \"string\") {\n throw new TypeError(`The \"name\" you pass into <Folder /> must be a string. Got: ${typeof name}`);\n }\n if (!isFolderNameValid(name)) {\n throw new Error(`Folder name can only contain a-z, A-Z, 0-9 and -. You passed ${name}`);\n }\n};\nvar invalidFolderNameErrorMessage = `Folder name must match ${String(getRegex())}`;\n\n// src/Folder.tsx\nimport { jsx as jsx20 } from \"react/jsx-runtime\";\nvar FolderContext = createContext19({\n folderName: null,\n parentName: null\n});\nvar Folder = ({ name, children }) => {\n const parent = useContext27(FolderContext);\n const { registerFolder, unregisterFolder } = useContext27(CompositionSetters);\n const nonce = useNonce();\n validateFolderName(name);\n const parentNameArr = [parent.parentName, parent.folderName].filter(truthy);\n const parentName = parentNameArr.length === 0 ? 
null : parentNameArr.join(\"/\");\n const value = useMemo24(() => {\n return {\n folderName: name,\n parentName\n };\n }, [name, parentName]);\n useEffect12(() => {\n registerFolder(name, parentName, nonce.get());\n return () => {\n unregisterFolder(name, parentName);\n };\n }, [\n name,\n parent.folderName,\n parentName,\n registerFolder,\n unregisterFolder,\n nonce\n ]);\n return /* @__PURE__ */ jsx20(FolderContext.Provider, {\n value,\n children\n });\n};\n\n// src/loading-indicator.tsx\nimport { jsx as jsx21, jsxs as jsxs2 } from \"react/jsx-runtime\";\nvar rotate = {\n transform: `rotate(90deg)`\n};\nvar ICON_SIZE = 40;\nvar label = {\n color: \"white\",\n fontSize: 14,\n fontFamily: \"sans-serif\"\n};\nvar container = {\n justifyContent: \"center\",\n alignItems: \"center\"\n};\nvar Loading = () => {\n return /* @__PURE__ */ jsxs2(AbsoluteFill, {\n style: container,\n id: \"remotion-comp-loading\",\n children: [\n /* @__PURE__ */ jsx21(\"style\", {\n type: \"text/css\",\n children: `\n\t\t\t\t@keyframes anim {\n\t\t\t\t\tfrom {\n\t\t\t\t\t\topacity: 0\n\t\t\t\t\t}\n\t\t\t\t\tto {\n\t\t\t\t\t\topacity: 1\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t#remotion-comp-loading {\n\t\t\t\t\tanimation: anim 2s;\n\t\t\t\t\tanimation-fill-mode: forwards;\n\t\t\t\t}\n\t\t\t`\n }),\n /* @__PURE__ */ jsx21(\"svg\", {\n width: ICON_SIZE,\n height: ICON_SIZE,\n viewBox: \"-100 -100 400 400\",\n style: rotate,\n children: /* @__PURE__ */ jsx21(\"path\", {\n fill: \"#555\",\n stroke: \"#555\",\n strokeWidth: \"100\",\n strokeLinejoin: \"round\",\n d: \"M 2 172 a 196 100 0 0 0 195 5 A 196 240 0 0 0 100 2.259 A 196 240 0 0 0 2 172 z\"\n })\n }),\n /* @__PURE__ */ jsxs2(\"p\", {\n style: label,\n children: [\n \"Resolving \",\n \"<Suspense>\",\n \"...\"\n ]\n })\n ]\n });\n};\n\n// src/portal-node.ts\nvar _portalNode = null;\nvar portalNode = () => {\n if (!_portalNode) {\n if (typeof document === \"undefined\") {\n throw new Error(\"Tried to call an API that only works in the browser from 
outside the browser\");\n }\n _portalNode = document.createElement(\"div\");\n _portalNode.style.position = \"absolute\";\n _portalNode.style.top = \"0px\";\n _portalNode.style.left = \"0px\";\n _portalNode.style.right = \"0px\";\n _portalNode.style.bottom = \"0px\";\n _portalNode.style.width = \"100%\";\n _portalNode.style.height = \"100%\";\n _portalNode.style.display = \"flex\";\n _portalNode.style.flexDirection = \"column\";\n const containerNode = document.createElement(\"div\");\n containerNode.style.position = \"fixed\";\n containerNode.style.top = -999999 + \"px\";\n containerNode.appendChild(_portalNode);\n document.body.appendChild(containerNode);\n }\n return _portalNode;\n};\n\n// src/use-lazy-component.ts\nimport React20, { useMemo as useMemo25, useRef as useRef16 } from \"react\";\nvar useLazyComponent = ({\n compProps,\n componentName,\n noSuspense\n}) => {\n const componentRef = useRef16(null);\n if (\"component\" in compProps) {\n componentRef.current = compProps.component;\n }\n const lazy = useMemo25(() => {\n if (\"component\" in compProps) {\n if (typeof document === \"undefined\" || noSuspense) {\n return compProps.component;\n }\n if (typeof compProps.component === \"undefined\") {\n throw new Error(`A value of \\`undefined\\` was passed to the \\`component\\` prop. Check the value you are passing to the <${componentName}/> component.`);\n }\n const Wrapper = (props2) => {\n const Comp = componentRef.current;\n return React20.createElement(Comp, props2);\n };\n return Wrapper;\n }\n if (\"lazyComponent\" in compProps && typeof compProps.lazyComponent !== \"undefined\") {\n if (typeof compProps.lazyComponent === \"undefined\") {\n throw new Error(`A value of \\`undefined\\` was passed to the \\`lazyComponent\\` prop. 
Check the value you are passing to the <${componentName}/> component.`);\n }\n return React20.lazy(compProps.lazyComponent);\n }\n throw new Error(\"You must pass either 'component' or 'lazyComponent'\");\n }, [compProps.lazyComponent]);\n return lazy;\n};\n\n// src/validation/validate-composition-id.ts\nvar getRegex2 = () => /^([a-zA-Z0-9-\\u4E00-\\u9FFF])+$/g;\nvar isCompositionIdValid = (id) => id.match(getRegex2());\nvar validateCompositionId = (id) => {\n if (!isCompositionIdValid(id)) {\n throw new Error(`Composition id can only contain a-z, A-Z, 0-9, CJK characters and -. You passed ${id}`);\n }\n};\nvar invalidCompositionErrorMessage = `Composition ID must match ${String(getRegex2())}`;\n\n// src/validation/validate-default-props.ts\nvar validateDefaultAndInputProps = (defaultProps, name, compositionId) => {\n if (!defaultProps) {\n return;\n }\n if (typeof defaultProps !== \"object\") {\n throw new Error(`\"${name}\" must be an object, but you passed a value of type ${typeof defaultProps}`);\n }\n if (Array.isArray(defaultProps)) {\n throw new Error(`\"${name}\" must be an object, an array was passed ${compositionId ? 
`for composition \"${compositionId}\"` : \"\"}`);\n }\n};\n\n// src/Composition.tsx\nimport { jsx as jsx22 } from \"react/jsx-runtime\";\nvar Fallback = () => {\n const { continueRender: continueRender2, delayRender: delayRender2 } = useDelayRender();\n useEffect13(() => {\n const fallback = delayRender2(\"Waiting for Root component to unsuspend\");\n return () => continueRender2(fallback);\n }, [continueRender2, delayRender2]);\n return null;\n};\nvar InnerComposition = ({\n width,\n height,\n fps,\n durationInFrames,\n id,\n defaultProps,\n schema,\n ...compProps\n}) => {\n const compManager = useContext28(CompositionSetters);\n const { registerComposition, unregisterComposition } = compManager;\n const video = useVideo();\n const lazy = useLazyComponent({\n compProps,\n componentName: \"Composition\",\n noSuspense: false\n });\n const nonce = useNonce();\n const isPlayer = useIsPlayer();\n const environment = useRemotionEnvironment();\n const canUseComposition = useContext28(CanUseRemotionHooks);\n if (typeof window !== \"undefined\") {\n window.remotion_seenCompositionIds = Array.from(new Set([...window.remotion_seenCompositionIds ?? [], id]));\n }\n if (canUseComposition) {\n if (isPlayer) {\n throw new Error(\"<Composition> was mounted inside the `component` that was passed to the <Player>. See https://remotion.dev/docs/wrong-composition-mount for help.\");\n }\n throw new Error(\"<Composition> mounted inside another composition. See https://remotion.dev/docs/wrong-composition-mount for help.\");\n }\n const { folderName, parentName } = useContext28(FolderContext);\n useEffect13(() => {\n if (!id) {\n throw new Error(\"No id for composition passed.\");\n }\n validateCompositionId(id);\n validateDefaultAndInputProps(defaultProps, \"defaultProps\", id);\n registerComposition({\n durationInFrames: durationInFrames ?? undefined,\n fps: fps ?? undefined,\n height: height ?? undefined,\n width: width ?? 
undefined,\n id,\n folderName,\n component: lazy,\n defaultProps: serializeThenDeserializeInStudio(defaultProps ?? {}),\n nonce: nonce.get(),\n parentFolderName: parentName,\n schema: schema ?? null,\n calculateMetadata: compProps.calculateMetadata ?? null\n });\n return () => {\n unregisterComposition(id);\n };\n }, [\n durationInFrames,\n fps,\n height,\n lazy,\n id,\n folderName,\n defaultProps,\n width,\n nonce,\n parentName,\n schema,\n compProps.calculateMetadata,\n registerComposition,\n unregisterComposition\n ]);\n useEffect13(() => {\n window.dispatchEvent(new CustomEvent(PROPS_UPDATED_EXTERNALLY, {\n detail: {\n resetUnsaved: id\n }\n }));\n }, [defaultProps, id]);\n const resolved = useResolvedVideoConfig(id);\n if (environment.isStudio && video && video.component === lazy && video.id === id) {\n const Comp = lazy;\n if (resolved === null || resolved.type !== \"success\" && resolved.type !== \"success-and-refreshing\") {\n return null;\n }\n return createPortal(/* @__PURE__ */ jsx22(CanUseRemotionHooksProvider, {\n children: /* @__PURE__ */ jsx22(Suspense, {\n fallback: /* @__PURE__ */ jsx22(Loading, {}),\n children: /* @__PURE__ */ jsx22(Comp, {\n ...resolved.result.props ?? {}\n })\n })\n }), portalNode());\n }\n if (environment.isRendering && video && video.component === lazy && video.id === id) {\n const Comp = lazy;\n if (resolved === null || resolved.type !== \"success\" && resolved.type !== \"success-and-refreshing\") {\n return null;\n }\n return createPortal(/* @__PURE__ */ jsx22(CanUseRemotionHooksProvider, {\n children: /* @__PURE__ */ jsx22(Suspense, {\n fallback: /* @__PURE__ */ jsx22(Fallback, {}),\n children: /* @__PURE__ */ jsx22(Comp, {\n ...resolved.result.props ?? 
{}\n })\n })\n }), portalNode());\n }\n return null;\n};\nvar Composition = (props2) => {\n const { onlyRenderComposition } = useContext28(CompositionSetters);\n if (onlyRenderComposition && onlyRenderComposition !== props2.id) {\n return null;\n }\n return /* @__PURE__ */ jsx22(InnerComposition, {\n ...props2\n });\n};\n// src/bezier.ts\nvar NEWTON_ITERATIONS = 4;\nvar NEWTON_MIN_SLOPE = 0.001;\nvar SUBDIVISION_PRECISION = 0.0000001;\nvar SUBDIVISION_MAX_ITERATIONS = 10;\nvar kSplineTableSize = 11;\nvar kSampleStepSize = 1 / (kSplineTableSize - 1);\nvar float32ArraySupported = typeof Float32Array === \"function\";\nfunction a(aA1, aA2) {\n return 1 - 3 * aA2 + 3 * aA1;\n}\nfunction b(aA1, aA2) {\n return 3 * aA2 - 6 * aA1;\n}\nfunction c(aA1) {\n return 3 * aA1;\n}\nfunction calcBezier(aT, aA1, aA2) {\n return ((a(aA1, aA2) * aT + b(aA1, aA2)) * aT + c(aA1)) * aT;\n}\nfunction getSlope(aT, aA1, aA2) {\n return 3 * a(aA1, aA2) * aT * aT + 2 * b(aA1, aA2) * aT + c(aA1);\n}\nfunction binarySubdivide({\n aX,\n _aA,\n _aB,\n mX1,\n mX2\n}) {\n let currentX;\n let currentT;\n let i = 0;\n let aA = _aA;\n let aB = _aB;\n do {\n currentT = aA + (aB - aA) / 2;\n currentX = calcBezier(currentT, mX1, mX2) - aX;\n if (currentX > 0) {\n aB = currentT;\n } else {\n aA = currentT;\n }\n } while (Math.abs(currentX) > SUBDIVISION_PRECISION && ++i < SUBDIVISION_MAX_ITERATIONS);\n return currentT;\n}\nfunction newtonRaphsonIterate(aX, _aGuessT, mX1, mX2) {\n let aGuessT = _aGuessT;\n for (let i = 0;i < NEWTON_ITERATIONS; ++i) {\n const currentSlope = getSlope(aGuessT, mX1, mX2);\n if (currentSlope === 0) {\n return aGuessT;\n }\n const currentX = calcBezier(aGuessT, mX1, mX2) - aX;\n aGuessT -= currentX / currentSlope;\n }\n return aGuessT;\n}\nfunction bezier(mX1, mY1, mX2, mY2) {\n if (!(mX1 >= 0 && mX1 <= 1 && mX2 >= 0 && mX2 <= 1)) {\n throw new Error(\"bezier x values must be in [0, 1] range\");\n }\n const sampleValues = float32ArraySupported ? 
new Float32Array(kSplineTableSize) : new Array(kSplineTableSize);\n if (mX1 !== mY1 || mX2 !== mY2) {\n for (let i = 0;i < kSplineTableSize; ++i) {\n sampleValues[i] = calcBezier(i * kSampleStepSize, mX1, mX2);\n }\n }\n function getTForX(aX) {\n let intervalStart = 0;\n let currentSample = 1;\n const lastSample = kSplineTableSize - 1;\n for (;currentSample !== lastSample && sampleValues[currentSample] <= aX; ++currentSample) {\n intervalStart += kSampleStepSize;\n }\n --currentSample;\n const dist = (aX - sampleValues[currentSample]) / (sampleValues[currentSample + 1] - sampleValues[currentSample]);\n const guessForT = intervalStart + dist * kSampleStepSize;\n const initialSlope = getSlope(guessForT, mX1, mX2);\n if (initialSlope >= NEWTON_MIN_SLOPE) {\n return newtonRaphsonIterate(aX, guessForT, mX1, mX2);\n }\n if (initialSlope === 0) {\n return guessForT;\n }\n return binarySubdivide({\n aX,\n _aA: intervalStart,\n _aB: intervalStart + kSampleStepSize,\n mX1,\n mX2\n });\n }\n return function(x) {\n if (mX1 === mY1 && mX2 === mY2) {\n return x;\n }\n if (x === 0) {\n return 0;\n }\n if (x === 1) {\n return 1;\n }\n return calcBezier(getTForX(x), mY1, mY2);\n };\n}\n\n// src/easing.ts\nclass Easing {\n static step0(n) {\n return n > 0 ? 1 : 0;\n }\n static step1(n) {\n return n >= 1 ? 
1 : 0;\n }\n static linear(t) {\n return t;\n }\n static ease(t) {\n return Easing.bezier(0.42, 0, 1, 1)(t);\n }\n static quad(t) {\n return t * t;\n }\n static cubic(t) {\n return t * t * t;\n }\n static poly(n) {\n return (t) => t ** n;\n }\n static sin(t) {\n return 1 - Math.cos(t * Math.PI / 2);\n }\n static circle(t) {\n return 1 - Math.sqrt(1 - t * t);\n }\n static exp(t) {\n return 2 ** (10 * (t - 1));\n }\n static elastic(bounciness = 1) {\n const p = bounciness * Math.PI;\n return (t) => 1 - Math.cos(t * Math.PI / 2) ** 3 * Math.cos(t * p);\n }\n static back(s = 1.70158) {\n return (t) => t * t * ((s + 1) * t - s);\n }\n static bounce(t) {\n if (t < 1 / 2.75) {\n return 7.5625 * t * t;\n }\n if (t < 2 / 2.75) {\n const t2_ = t - 1.5 / 2.75;\n return 7.5625 * t2_ * t2_ + 0.75;\n }\n if (t < 2.5 / 2.75) {\n const t2_ = t - 2.25 / 2.75;\n return 7.5625 * t2_ * t2_ + 0.9375;\n }\n const t2 = t - 2.625 / 2.75;\n return 7.5625 * t2 * t2 + 0.984375;\n }\n static bezier(x1, y1, x2, y2) {\n return bezier(x1, y1, x2, y2);\n }\n static in(easing) {\n return easing;\n }\n static out(easing) {\n return (t) => 1 - easing(1 - t);\n }\n static inOut(easing) {\n return (t) => {\n if (t < 0.5) {\n return easing(t * 2) / 2;\n }\n return 1 - easing((1 - t) * 2) / 2;\n };\n }\n}\n// src/get-static-files.ts\nvar warnedServer = false;\nvar warnedPlayer = false;\nvar warnServerOnce = () => {\n if (warnedServer) {\n return;\n }\n warnedServer = true;\n console.warn(\"Called getStaticFiles() on the server. The API is only available in the browser. An empty array was returned.\");\n};\nvar warnPlayerOnce = () => {\n if (warnedPlayer) {\n return;\n }\n warnedPlayer = true;\n console.warn(\"Called getStaticFiles() while using the Remotion Player. The API is only available while using the Remotion Studio. 
An empty array was returned.\");\n};\nvar getStaticFiles = () => {\n if (ENABLE_V5_BREAKING_CHANGES) {\n throw new Error(\"getStaticFiles() has moved into the `@remotion/studio` package. Update your imports.\");\n }\n if (typeof document === \"undefined\") {\n warnServerOnce();\n return [];\n }\n if (window.remotion_isPlayer) {\n warnPlayerOnce();\n return [];\n }\n return window.remotion_staticFiles;\n};\n// src/IFrame.tsx\nimport { forwardRef as forwardRef7, useCallback as useCallback12, useState as useState14 } from \"react\";\nimport { jsx as jsx23 } from \"react/jsx-runtime\";\nvar IFrameRefForwarding = ({\n onLoad,\n onError,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n ...props2\n}, ref) => {\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n const [handle] = useState14(() => delayRender2(`Loading <IFrame> with source ${props2.src}`, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? 
undefined\n }));\n const didLoad = useCallback12((e) => {\n continueRender2(handle);\n onLoad?.(e);\n }, [handle, onLoad, continueRender2]);\n const didGetError = useCallback12((e) => {\n continueRender2(handle);\n if (onError) {\n onError(e);\n } else {\n console.error(\"Error loading iframe:\", e, \"Handle the event using the onError() prop to make this message disappear.\");\n }\n }, [handle, onError, continueRender2]);\n return /* @__PURE__ */ jsx23(\"iframe\", {\n referrerPolicy: \"strict-origin-when-cross-origin\",\n ...props2,\n ref,\n onError: didGetError,\n onLoad: didLoad\n });\n};\nvar IFrame = forwardRef7(IFrameRefForwarding);\n// src/Img.tsx\nimport {\n forwardRef as forwardRef8,\n useCallback as useCallback13,\n useContext as useContext29,\n useImperativeHandle as useImperativeHandle7,\n useLayoutEffect as useLayoutEffect9,\n useRef as useRef17\n} from \"react\";\nimport { jsx as jsx24 } from \"react/jsx-runtime\";\nfunction exponentialBackoff(errorCount) {\n return 1000 * 2 ** (errorCount - 1);\n}\nvar ImgRefForwarding = ({\n onError,\n maxRetries = 2,\n src,\n pauseWhenLoading,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n onImageFrame,\n crossOrigin,\n ...props2\n}, ref) => {\n const imageRef = useRef17(null);\n const errors = useRef17({});\n const { delayPlayback } = useBufferState();\n const sequenceContext = useContext29(SequenceContext);\n if (!src) {\n throw new Error('No \"src\" prop was passed to <Img>.');\n }\n const _propsValid = true;\n if (!_propsValid) {\n throw new Error(\"typecheck error\");\n }\n useImperativeHandle7(ref, () => {\n return imageRef.current;\n }, []);\n const actualSrc = usePreload(src);\n const retryIn = useCallback13((timeout) => {\n if (!imageRef.current) {\n return;\n }\n const currentSrc = imageRef.current.src;\n setTimeout(() => {\n if (!imageRef.current) {\n return;\n }\n const newSrc = imageRef.current?.src;\n if (newSrc !== currentSrc) {\n return;\n }\n 
imageRef.current.removeAttribute(\"src\");\n imageRef.current.setAttribute(\"src\", newSrc);\n }, timeout);\n }, []);\n const { delayRender: delayRender2, continueRender: continueRender2, cancelRender: cancelRender2 } = useDelayRender();\n const didGetError = useCallback13((e) => {\n if (!errors.current) {\n return;\n }\n errors.current[imageRef.current?.src] = (errors.current[imageRef.current?.src] ?? 0) + 1;\n if (onError && (errors.current[imageRef.current?.src] ?? 0) > maxRetries) {\n onError(e);\n return;\n }\n if ((errors.current[imageRef.current?.src] ?? 0) <= maxRetries) {\n const backoff = exponentialBackoff(errors.current[imageRef.current?.src] ?? 0);\n console.warn(`Could not load image with source ${imageRef.current?.src}, retrying again in ${backoff}ms`);\n retryIn(backoff);\n return;\n }\n try {\n cancelRender2(\"Error loading image with src: \" + imageRef.current?.src);\n } catch {}\n }, [cancelRender2, maxRetries, onError, retryIn]);\n if (typeof window !== \"undefined\") {\n const isPremounting = Boolean(sequenceContext?.premounting);\n const isPostmounting = Boolean(sequenceContext?.postmounting);\n useLayoutEffect9(() => {\n if (window.process?.env?.NODE_ENV === \"test\") {\n if (imageRef.current) {\n imageRef.current.src = actualSrc;\n }\n return;\n }\n const { current } = imageRef;\n if (!current) {\n return;\n }\n const newHandle = delayRender2(\"Loading <Img> with src=\" + actualSrc, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined\n });\n const unblock = pauseWhenLoading && !isPremounting && !isPostmounting ? delayPlayback().unblock : () => {\n return;\n };\n let unmounted = false;\n const onComplete = () => {\n if (unmounted) {\n continueRender2(newHandle);\n return;\n }\n if ((errors.current[imageRef.current?.src] ?? 
0) > 0) {\n delete errors.current[imageRef.current?.src];\n console.info(`Retry successful - ${imageRef.current?.src} is now loaded`);\n }\n if (current) {\n onImageFrame?.(current);\n }\n unblock();\n continueRender2(newHandle);\n };\n if (!imageRef.current) {\n onComplete();\n return;\n }\n current.src = actualSrc;\n current.decode().then(onComplete).catch((err) => {\n console.warn(err);\n if (current.complete && current.naturalWidth > 0 && current.naturalHeight > 0) {\n onComplete();\n } else {\n current.addEventListener(\"load\", onComplete);\n }\n });\n return () => {\n unmounted = true;\n current.removeEventListener(\"load\", onComplete);\n unblock();\n continueRender2(newHandle);\n };\n }, [\n actualSrc,\n delayPlayback,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n pauseWhenLoading,\n isPremounting,\n isPostmounting,\n onImageFrame,\n continueRender2,\n delayRender2\n ]);\n }\n const { isClientSideRendering } = useRemotionEnvironment();\n const crossOriginValue = getCrossOriginValue({\n crossOrigin,\n requestsVideoFrame: false,\n isClientSideRendering\n });\n return /* @__PURE__ */ jsx24(\"img\", {\n ...props2,\n ref: imageRef,\n crossOrigin: crossOriginValue,\n onError: didGetError,\n decoding: \"sync\"\n });\n};\nvar Img = forwardRef8(ImgRefForwarding);\n// src/internals.ts\nimport { createRef as createRef3 } from \"react\";\n\n// src/CompositionManager.tsx\nimport React24 from \"react\";\nvar compositionsRef = React24.createRef();\n\n// src/CompositionManagerProvider.tsx\nimport {\n useCallback as useCallback14,\n useImperativeHandle as useImperativeHandle8,\n useMemo as useMemo26,\n useRef as useRef18,\n useState as useState15\n} from \"react\";\nimport { jsx as jsx25 } from \"react/jsx-runtime\";\nvar CompositionManagerProvider = ({\n children,\n onlyRenderComposition,\n currentCompositionMetadata,\n initialCompositions,\n initialCanvasContent\n}) => {\n const [folders, setFolders] = useState15([]);\n const [canvasContent, 
setCanvasContent] = useState15(initialCanvasContent);\n const [compositions, setCompositions] = useState15(initialCompositions);\n const currentcompositionsRef = useRef18(compositions);\n const updateCompositions = useCallback14((updateComps) => {\n setCompositions((comps) => {\n const updated = updateComps(comps);\n currentcompositionsRef.current = updated;\n return updated;\n });\n }, []);\n const registerComposition = useCallback14((comp) => {\n updateCompositions((comps) => {\n if (comps.find((c2) => c2.id === comp.id)) {\n throw new Error(`Multiple composition with id ${comp.id} are registered.`);\n }\n return [...comps, comp];\n });\n }, [updateCompositions]);\n const unregisterComposition = useCallback14((id) => {\n setCompositions((comps) => {\n return comps.filter((c2) => c2.id !== id);\n });\n }, []);\n const registerFolder = useCallback14((name, parent, nonce) => {\n setFolders((prevFolders) => {\n return [\n ...prevFolders,\n {\n name,\n parent,\n nonce\n }\n ];\n });\n }, []);\n const unregisterFolder = useCallback14((name, parent) => {\n setFolders((prevFolders) => {\n return prevFolders.filter((p) => !(p.name === name && p.parent === parent));\n });\n }, []);\n useImperativeHandle8(compositionsRef, () => {\n return {\n getCompositions: () => currentcompositionsRef.current\n };\n }, []);\n const updateCompositionDefaultProps = useCallback14((id, newDefaultProps) => {\n setCompositions((comps) => {\n const updated = comps.map((c2) => {\n if (c2.id === id) {\n return {\n ...c2,\n defaultProps: newDefaultProps\n };\n }\n return c2;\n });\n return updated;\n });\n }, []);\n const compositionManagerSetters = useMemo26(() => {\n return {\n registerComposition,\n unregisterComposition,\n registerFolder,\n unregisterFolder,\n setCanvasContent,\n updateCompositionDefaultProps,\n onlyRenderComposition\n };\n }, [\n registerComposition,\n registerFolder,\n unregisterComposition,\n unregisterFolder,\n updateCompositionDefaultProps,\n onlyRenderComposition\n ]);\n 
const compositionManagerContextValue = useMemo26(() => {\n return {\n compositions,\n folders,\n currentCompositionMetadata,\n canvasContent\n };\n }, [compositions, folders, currentCompositionMetadata, canvasContent]);\n return /* @__PURE__ */ jsx25(CompositionManager.Provider, {\n value: compositionManagerContextValue,\n children: /* @__PURE__ */ jsx25(CompositionSetters.Provider, {\n value: compositionManagerSetters,\n children\n })\n });\n};\n\n// src/default-css.ts\nvar exports_default_css = {};\n__export(exports_default_css, {\n makeDefaultPreviewCSS: () => makeDefaultPreviewCSS,\n injectCSS: () => injectCSS,\n OBJECTFIT_CONTAIN_CLASS_NAME: () => OBJECTFIT_CONTAIN_CLASS_NAME\n});\nvar injected = {};\nvar injectCSS = (css) => {\n if (typeof document === \"undefined\") {\n return () => {};\n }\n if (injected[css]) {\n return () => {};\n }\n const head = document.head || document.getElementsByTagName(\"head\")[0];\n const style = document.createElement(\"style\");\n style.appendChild(document.createTextNode(css));\n head.prepend(style);\n injected[css] = style;\n return () => {\n const styleElement = injected[css];\n if (styleElement) {\n if (styleElement.parentNode) {\n styleElement.parentNode.removeChild(styleElement);\n }\n delete injected[css];\n }\n };\n};\nvar OBJECTFIT_CONTAIN_CLASS_NAME = \"__remotion_objectfitcontain\";\nvar makeDefaultPreviewCSS = (scope, backgroundColor) => {\n if (!scope) {\n return `\n * {\n box-sizing: border-box;\n }\n body {\n margin: 0;\n\t background-color: ${backgroundColor};\n }\n .${OBJECTFIT_CONTAIN_CLASS_NAME} {\n object-fit: contain;\n }\n `;\n }\n return `\n ${scope} * {\n box-sizing: border-box;\n }\n ${scope} *:-webkit-full-screen {\n width: 100%;\n height: 100%;\n }\n ${scope} .${OBJECTFIT_CONTAIN_CLASS_NAME} {\n object-fit: contain;\n }\n `;\n};\n\n// src/get-effective-visual-mode-value.ts\nvar getEffectiveVisualModeValue = ({\n codeValue,\n runtimeValue,\n dragOverrideValue,\n defaultValue,\n 
shouldResortToDefaultValueIfUndefined = false\n}) => {\n if (dragOverrideValue !== undefined) {\n return dragOverrideValue;\n }\n if (!codeValue) {\n return runtimeValue;\n }\n if (!codeValue.canUpdate) {\n return runtimeValue;\n }\n if (codeValue.codeValue === undefined && shouldResortToDefaultValueIfUndefined) {\n return defaultValue;\n }\n return codeValue.codeValue;\n};\n\n// src/get-preview-dom-element.ts\nvar REMOTION_STUDIO_CONTAINER_ELEMENT = \"__remotion-studio-container\";\nvar getPreviewDomElement = () => {\n return document.getElementById(REMOTION_STUDIO_CONTAINER_ELEMENT);\n};\n\n// src/max-video-cache-size.ts\nimport React25 from \"react\";\nvar MaxMediaCacheSizeContext = React25.createContext(null);\n\n// src/register-root.ts\nvar Root = null;\nvar listeners = [];\nvar registerRoot = (comp) => {\n if (!comp) {\n throw new Error(`You must pass a React component to registerRoot(), but ${JSON.stringify(comp)} was passed.`);\n }\n if (Root) {\n throw new Error(\"registerRoot() was called more than once.\");\n }\n Root = comp;\n listeners.forEach((l) => {\n l(comp);\n });\n};\nvar getRoot = () => {\n return Root;\n};\nvar waitForRoot = (fn) => {\n if (Root) {\n fn(Root);\n return () => {\n return;\n };\n }\n listeners.push(fn);\n return () => {\n listeners = listeners.filter((l) => l !== fn);\n };\n};\n\n// src/RemotionRoot.tsx\nimport { useMemo as useMemo28 } from \"react\";\n\n// src/use-media-enabled.tsx\nimport { createContext as createContext20, useContext as useContext30, useMemo as useMemo27 } from \"react\";\nimport { jsx as jsx26 } from \"react/jsx-runtime\";\nvar MediaEnabledContext = createContext20(null);\nvar useVideoEnabled = () => {\n const context = useContext30(MediaEnabledContext);\n if (!context) {\n return window.remotion_videoEnabled;\n }\n if (context.videoEnabled === null) {\n return window.remotion_videoEnabled;\n }\n return context.videoEnabled;\n};\nvar useAudioEnabled = () => {\n const context = 
useContext30(MediaEnabledContext);\n if (!context) {\n return window.remotion_audioEnabled;\n }\n if (context.audioEnabled === null) {\n return window.remotion_audioEnabled;\n }\n return context.audioEnabled;\n};\nvar MediaEnabledProvider = ({\n children,\n videoEnabled,\n audioEnabled\n}) => {\n const value = useMemo27(() => ({ videoEnabled, audioEnabled }), [videoEnabled, audioEnabled]);\n return /* @__PURE__ */ jsx26(MediaEnabledContext.Provider, {\n value,\n children\n });\n};\n\n// src/RemotionRoot.tsx\nimport { jsx as jsx27 } from \"react/jsx-runtime\";\nvar RemotionRootContexts = ({\n children,\n numberOfAudioTags,\n logLevel,\n audioLatencyHint,\n videoEnabled,\n audioEnabled,\n frameState,\n visualModeEnabled\n}) => {\n const nonceContext = useMemo28(() => {\n let counter = 0;\n return {\n getNonce: () => counter++\n };\n }, []);\n const logging = useMemo28(() => {\n return { logLevel, mountTime: Date.now() };\n }, [logLevel]);\n return /* @__PURE__ */ jsx27(LogLevelContext.Provider, {\n value: logging,\n children: /* @__PURE__ */ jsx27(NonceContext.Provider, {\n value: nonceContext,\n children: /* @__PURE__ */ jsx27(TimelineContextProvider, {\n frameState,\n children: /* @__PURE__ */ jsx27(MediaEnabledProvider, {\n videoEnabled,\n audioEnabled,\n children: /* @__PURE__ */ jsx27(EditorPropsProvider, {\n children: /* @__PURE__ */ jsx27(PrefetchProvider, {\n children: /* @__PURE__ */ jsx27(SequenceManagerProvider, {\n visualModeEnabled,\n children: /* @__PURE__ */ jsx27(SharedAudioContextProvider, {\n numberOfAudioTags,\n audioLatencyHint,\n audioEnabled,\n children: /* @__PURE__ */ jsx27(DurationsContextProvider, {\n children: /* @__PURE__ */ jsx27(BufferingProvider, {\n children\n })\n })\n })\n })\n })\n })\n })\n })\n })\n });\n};\n\n// src/codec.ts\nvar validCodecs = [\n \"h264\",\n \"h265\",\n \"vp8\",\n \"vp9\",\n \"mp3\",\n \"aac\",\n \"wav\",\n \"prores\",\n \"h264-mkv\",\n \"h264-ts\",\n \"gif\"\n];\n\n// 
src/validation/validate-default-codec.ts\nfunction validateCodec(defaultCodec, location, name) {\n if (typeof defaultCodec === \"undefined\") {\n return;\n }\n if (typeof defaultCodec !== \"string\") {\n throw new TypeError(`The \"${name}\" prop ${location} must be a string, but you passed a value of type ${typeof defaultCodec}.`);\n }\n if (!validCodecs.includes(defaultCodec)) {\n throw new Error(`The \"${name}\" prop ${location} must be one of ${validCodecs.join(\", \")}, but you passed ${defaultCodec}.`);\n }\n}\n\n// src/resolve-video-config.ts\nvar validateCalculated = ({\n calculated,\n compositionId,\n compositionFps,\n compositionHeight,\n compositionWidth,\n compositionDurationInFrames\n}) => {\n const calculateMetadataErrorLocation = `calculated by calculateMetadata() for the composition \"${compositionId}\"`;\n const defaultErrorLocation = `of the \"<Composition />\" component with the id \"${compositionId}\"`;\n const width = calculated?.width ?? compositionWidth ?? undefined;\n validateDimension(width, \"width\", calculated?.width ? calculateMetadataErrorLocation : defaultErrorLocation);\n const height = calculated?.height ?? compositionHeight ?? undefined;\n validateDimension(height, \"height\", calculated?.height ? calculateMetadataErrorLocation : defaultErrorLocation);\n const fps = calculated?.fps ?? compositionFps ?? null;\n validateFps(fps, calculated?.fps ? calculateMetadataErrorLocation : defaultErrorLocation, false);\n const durationInFrames = calculated?.durationInFrames ?? compositionDurationInFrames ?? 
null;\n validateDurationInFrames(durationInFrames, {\n allowFloats: false,\n component: `of the \"<Composition />\" component with the id \"${compositionId}\"`\n });\n const defaultCodec = calculated?.defaultCodec;\n validateCodec(defaultCodec, calculateMetadataErrorLocation, \"defaultCodec\");\n const defaultOutName = calculated?.defaultOutName;\n const defaultVideoImageFormat = calculated?.defaultVideoImageFormat;\n const defaultPixelFormat = calculated?.defaultPixelFormat;\n const defaultProResProfile = calculated?.defaultProResProfile;\n return {\n width,\n height,\n fps,\n durationInFrames,\n defaultCodec,\n defaultOutName,\n defaultVideoImageFormat,\n defaultPixelFormat,\n defaultProResProfile\n };\n};\nvar resolveVideoConfig = ({\n calculateMetadata,\n signal,\n defaultProps,\n inputProps: originalProps,\n compositionId,\n compositionDurationInFrames,\n compositionFps,\n compositionHeight,\n compositionWidth\n}) => {\n const calculatedProm = calculateMetadata ? calculateMetadata({\n defaultProps,\n props: originalProps,\n abortSignal: signal,\n compositionId,\n isRendering: getRemotionEnvironment().isRendering\n }) : null;\n if (calculatedProm !== null && typeof calculatedProm === \"object\" && \"then\" in calculatedProm) {\n return calculatedProm.then((c2) => {\n const {\n height,\n width,\n durationInFrames,\n fps,\n defaultCodec,\n defaultOutName,\n defaultVideoImageFormat,\n defaultPixelFormat,\n defaultProResProfile\n } = validateCalculated({\n calculated: c2,\n compositionDurationInFrames,\n compositionFps,\n compositionHeight,\n compositionWidth,\n compositionId\n });\n return {\n width,\n height,\n fps,\n durationInFrames,\n id: compositionId,\n defaultProps: serializeThenDeserializeInStudio(defaultProps),\n props: serializeThenDeserializeInStudio(c2.props ?? originalProps),\n defaultCodec: defaultCodec ?? null,\n defaultOutName: defaultOutName ?? null,\n defaultVideoImageFormat: defaultVideoImageFormat ?? 
null,\n defaultPixelFormat: defaultPixelFormat ?? null,\n defaultProResProfile: defaultProResProfile ?? null\n };\n });\n }\n const data = validateCalculated({\n calculated: calculatedProm,\n compositionDurationInFrames,\n compositionFps,\n compositionHeight,\n compositionWidth,\n compositionId\n });\n if (calculatedProm === null) {\n return {\n ...data,\n id: compositionId,\n defaultProps: serializeThenDeserializeInStudio(defaultProps ?? {}),\n props: serializeThenDeserializeInStudio(originalProps),\n defaultCodec: null,\n defaultOutName: null,\n defaultVideoImageFormat: null,\n defaultPixelFormat: null,\n defaultProResProfile: null\n };\n }\n return {\n ...data,\n id: compositionId,\n defaultProps: serializeThenDeserializeInStudio(defaultProps ?? {}),\n props: serializeThenDeserializeInStudio(calculatedProm.props ?? originalProps),\n defaultCodec: calculatedProm.defaultCodec ?? null,\n defaultOutName: calculatedProm.defaultOutName ?? null,\n defaultVideoImageFormat: calculatedProm.defaultVideoImageFormat ?? null,\n defaultPixelFormat: calculatedProm.defaultPixelFormat ?? null,\n defaultProResProfile: calculatedProm.defaultProResProfile ?? 
null\n };\n};\nvar resolveVideoConfigOrCatch = (params) => {\n try {\n const promiseOrReturnValue = resolveVideoConfig(params);\n return {\n type: \"success\",\n result: promiseOrReturnValue\n };\n } catch (err) {\n return {\n type: \"error\",\n error: err\n };\n }\n};\n\n// src/sequence-stack-traces.ts\nimport React27 from \"react\";\nvar SequenceStackTracesUpdateContext = React27.createContext(() => {});\n\n// src/setup-env-variables.ts\nvar getEnvVariables = () => {\n if (getRemotionEnvironment().isRendering) {\n const param = window.remotion_envVariables;\n if (!param) {\n return {};\n }\n return { ...JSON.parse(param), NODE_ENV: \"production\" };\n }\n if (false) {}\n return {\n NODE_ENV: \"production\"\n };\n};\nvar setupEnvVariables = () => {\n const env = getEnvVariables();\n if (!window.process) {\n window.process = {};\n }\n if (!window.process.env) {\n window.process.env = {};\n }\n Object.keys(env).forEach((key) => {\n window.process.env[key] = env[key];\n });\n};\n\n// src/use-current-scale.ts\nimport React28, { createContext as createContext21 } from \"react\";\nvar CurrentScaleContext = React28.createContext(null);\nvar PreviewSizeContext = createContext21({\n setSize: () => {\n return;\n },\n size: { size: \"auto\", translation: { x: 0, y: 0 } }\n});\nvar calculateScale = ({\n canvasSize,\n compositionHeight,\n compositionWidth,\n previewSize\n}) => {\n const heightRatio = canvasSize.height / compositionHeight;\n const widthRatio = canvasSize.width / compositionWidth;\n const ratio = Math.min(heightRatio, widthRatio);\n if (previewSize === \"auto\") {\n if (ratio === 0) {\n return 1;\n }\n return ratio;\n }\n return Number(previewSize);\n};\nvar useCurrentScale = (options) => {\n const hasContext = React28.useContext(CurrentScaleContext);\n const zoomContext = React28.useContext(PreviewSizeContext);\n const config = useUnsafeVideoConfig();\n const env = useRemotionEnvironment();\n if (hasContext === null || config === null || zoomContext === null) 
{\n if (options?.dontThrowIfOutsideOfRemotion) {\n return 1;\n }\n if (env.isRendering) {\n return 1;\n }\n throw new Error([\n \"useCurrentScale() was called outside of a Remotion context.\",\n \"This hook can only be called in a component that is being rendered by Remotion.\",\n \"If you want to this hook to return 1 outside of Remotion, pass {dontThrowIfOutsideOfRemotion: true} as an option.\",\n \"If you think you called this hook in a Remotion component, make sure all versions of Remotion are aligned.\"\n ].join(`\n`));\n }\n if (hasContext.type === \"scale\") {\n return hasContext.scale;\n }\n return calculateScale({\n canvasSize: hasContext.canvasSize,\n compositionHeight: config.height,\n compositionWidth: config.width,\n previewSize: zoomContext.size.size\n });\n};\n\n// src/use-schema.ts\nimport { useContext as useContext31, useMemo as useMemo29, useState as useState16 } from \"react\";\nvar useSchema = (schema, currentValue) => {\n const env = useRemotionEnvironment();\n const earlyReturn = useMemo29(() => {\n if (!env.isStudio || env.isReadOnlyStudio) {\n return {\n controls: undefined,\n values: currentValue ?? {}\n };\n }\n return;\n }, [env.isStudio, env.isReadOnlyStudio, currentValue]);\n if (earlyReturn) {\n return earlyReturn;\n }\n const [overrideId] = useState16(() => String(Math.random()));\n const {\n visualModeEnabled,\n dragOverrides: overrides,\n codeValues\n } = useContext31(VisualModeOverridesContext);\n const controls = useMemo29(() => {\n if (!visualModeEnabled) {\n return;\n }\n if (schema === null || currentValue === null) {\n return;\n }\n return {\n schema,\n currentValue,\n overrideId\n };\n }, [schema, currentValue, overrideId, visualModeEnabled]);\n return useMemo29(() => {\n if (controls === undefined || currentValue === null || schema === null || !visualModeEnabled) {\n return {\n controls: undefined,\n values: currentValue ?? {}\n };\n }\n const overrideValues = overrides[overrideId] ?? 
{};\n const propStatus = codeValues[overrideId];\n const currentValueKeys = Object.keys(currentValue);\n const keysToUpdate = new Set(currentValueKeys).values();\n const merged = {};\n for (const key of keysToUpdate) {\n const codeValueStatus = propStatus?.[key] ?? null;\n merged[key] = getEffectiveVisualModeValue({\n codeValue: codeValueStatus,\n runtimeValue: currentValue[key],\n dragOverrideValue: overrideValues[key],\n defaultValue: schema[key]?.default,\n shouldResortToDefaultValueIfUndefined: false\n });\n }\n return {\n controls,\n values: merged\n };\n }, [\n controls,\n currentValue,\n overrideId,\n overrides,\n codeValues,\n schema,\n visualModeEnabled\n ]);\n};\n\n// src/use-sequence-control-override.ts\nimport { useContext as useContext32 } from \"react\";\nvar useSequenceControlOverride = (key) => {\n const seqContext = useContext32(SequenceContext);\n const { dragOverrides: overrides } = useContext32(VisualModeOverridesContext);\n if (!seqContext) {\n return;\n }\n return overrides[seqContext.id]?.[key];\n};\n\n// src/video/OffthreadVideo.tsx\nimport { useCallback as useCallback16 } from \"react\";\n\n// src/video/OffthreadVideoForRendering.tsx\nimport {\n useCallback as useCallback15,\n useContext as useContext33,\n useEffect as useEffect14,\n useLayoutEffect as useLayoutEffect10,\n useMemo as useMemo30,\n useState as useState17\n} from \"react\";\n\n// src/video/offthread-video-source.ts\nvar getOffthreadVideoSource = ({\n src,\n transparent,\n currentTime,\n toneMapped\n}) => {\n return `http://localhost:${window.remotion_proxyPort}/proxy?src=${encodeURIComponent(getAbsoluteSrc(src))}&time=${encodeURIComponent(Math.max(0, currentTime))}&transparent=${String(transparent)}&toneMapped=${String(toneMapped)}`;\n};\n\n// src/video/OffthreadVideoForRendering.tsx\nimport { jsx as jsx28 } from \"react/jsx-runtime\";\nvar OffthreadVideoForRendering = ({\n onError,\n volume: volumeProp,\n playbackRate,\n src,\n muted,\n allowAmplificationDuringRender,\n 
transparent,\n toneMapped,\n toneFrequency,\n name,\n loopVolumeCurveBehavior,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n onVideoFrame,\n crossOrigin,\n audioStreamIndex,\n ...props2\n}) => {\n const absoluteFrame = useTimelinePosition();\n const frame = useCurrentFrame();\n const volumePropsFrame = useFrameForVolumeProp(loopVolumeCurveBehavior);\n const videoConfig = useUnsafeVideoConfig();\n const sequenceContext = useContext33(SequenceContext);\n const mediaStartsAt = useMediaStartsAt();\n const { registerRenderAsset, unregisterRenderAsset } = useContext33(RenderAssetManager);\n if (!src) {\n throw new TypeError(\"No `src` was passed to <OffthreadVideo>.\");\n }\n const id = useMemo30(() => `offthreadvideo-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [\n src,\n sequenceContext?.cumulatedFrom,\n sequenceContext?.relativeFrom,\n sequenceContext?.durationInFrames\n ]);\n if (!videoConfig) {\n throw new Error(\"No video config found\");\n }\n const volume = evaluateVolume({\n volume: volumeProp,\n frame: volumePropsFrame,\n mediaVolume: 1\n });\n warnAboutTooHighVolume(volume);\n useEffect14(() => {\n if (!src) {\n throw new Error(\"No src passed\");\n }\n if (!window.remotion_audioEnabled) {\n return;\n }\n if (muted) {\n return;\n }\n if (volume <= 0) {\n return;\n }\n registerRenderAsset({\n type: \"video\",\n src: getAbsoluteSrc(src),\n id,\n frame: absoluteFrame,\n volume,\n mediaFrame: frame,\n playbackRate,\n toneFrequency,\n audioStartFrame: Math.max(0, -(sequenceContext?.relativeFrom ?? 
0)),\n audioStreamIndex\n });\n return () => unregisterRenderAsset(id);\n }, [\n muted,\n src,\n registerRenderAsset,\n id,\n unregisterRenderAsset,\n volume,\n frame,\n absoluteFrame,\n playbackRate,\n toneFrequency,\n sequenceContext?.relativeFrom,\n audioStreamIndex\n ]);\n const currentTime = useMemo30(() => {\n return getExpectedMediaFrameUncorrected({\n frame,\n playbackRate: playbackRate || 1,\n startFrom: -mediaStartsAt\n }) / videoConfig.fps;\n }, [frame, mediaStartsAt, playbackRate, videoConfig.fps]);\n const actualSrc = useMemo30(() => {\n return getOffthreadVideoSource({\n src,\n currentTime,\n transparent,\n toneMapped\n });\n }, [toneMapped, currentTime, src, transparent]);\n const [imageSrc, setImageSrc] = useState17(null);\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n useLayoutEffect10(() => {\n if (!window.remotion_videoEnabled) {\n return;\n }\n const cleanup = [];\n setImageSrc(null);\n const controller = new AbortController;\n const newHandle = delayRender2(`Fetching ${actualSrc} from server`, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? 
undefined\n });\n const execute = async () => {\n try {\n const res = await fetch(actualSrc, {\n signal: controller.signal,\n cache: \"no-store\"\n });\n if (res.status !== 200) {\n if (res.status === 500) {\n const json = await res.json();\n if (json.error) {\n const cleanedUpErrorMessage = json.error.replace(/^Error: /, \"\");\n throw new Error(cleanedUpErrorMessage);\n }\n }\n throw new Error(`Server returned status ${res.status} while fetching ${actualSrc}`);\n }\n const blob = await res.blob();\n const url = URL.createObjectURL(blob);\n cleanup.push(() => URL.revokeObjectURL(url));\n setImageSrc({\n src: url,\n handle: newHandle\n });\n } catch (err) {\n if (err.message.includes(\"aborted\")) {\n continueRender2(newHandle);\n return;\n }\n if (controller.signal.aborted) {\n continueRender2(newHandle);\n return;\n }\n if (err.message.includes(\"Failed to fetch\")) {\n err = new Error(`Failed to fetch ${actualSrc}. This could be caused by Chrome rejecting the request because the disk space is low. 
Consider increasing the disk size of your environment.`, { cause: err });\n }\n if (onError) {\n onError(err);\n } else {\n cancelRender(err);\n }\n }\n };\n execute();\n cleanup.push(() => {\n if (controller.signal.aborted) {\n return;\n }\n controller.abort();\n });\n return () => {\n cleanup.forEach((c2) => c2());\n };\n }, [\n actualSrc,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n onError,\n continueRender2,\n delayRender2\n ]);\n const onErr = useCallback15(() => {\n if (onError) {\n onError?.(new Error(\"Failed to load image with src \" + imageSrc));\n } else {\n cancelRender(\"Failed to load image with src \" + imageSrc);\n }\n }, [imageSrc, onError]);\n const className = useMemo30(() => {\n return [OBJECTFIT_CONTAIN_CLASS_NAME, props2.className].filter(truthy).join(\" \");\n }, [props2.className]);\n const onImageFrame = useCallback15((img) => {\n if (onVideoFrame) {\n onVideoFrame(img);\n }\n }, [onVideoFrame]);\n if (!imageSrc || !window.remotion_videoEnabled) {\n return null;\n }\n continueRender2(imageSrc.handle);\n return /* @__PURE__ */ jsx28(Img, {\n src: imageSrc.src,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n onImageFrame,\n ...props2,\n onError: onErr,\n className\n });\n};\n\n// src/video/VideoForPreview.tsx\nimport React30, {\n forwardRef as forwardRef9,\n useContext as useContext34,\n useEffect as useEffect16,\n useImperativeHandle as useImperativeHandle9,\n useMemo as useMemo31,\n useRef as useRef19,\n useState as useState18\n} from \"react\";\n\n// src/video/emit-video-frame.ts\nimport { useEffect as useEffect15 } from \"react\";\nvar useEmitVideoFrame = ({\n ref,\n onVideoFrame\n}) => {\n useEffect15(() => {\n const { current } = ref;\n if (!current) {\n return;\n }\n if (!onVideoFrame) {\n return;\n }\n let handle = 0;\n const callback = () => {\n if (!ref.current) {\n return;\n }\n onVideoFrame(ref.current);\n handle = ref.current.requestVideoFrameCallback(callback);\n };\n callback();\n return () => {\n 
current.cancelVideoFrameCallback(handle);\n };\n }, [onVideoFrame, ref]);\n};\n\n// src/video/VideoForPreview.tsx\nimport { jsx as jsx29 } from \"react/jsx-runtime\";\nvar VideoForDevelopmentRefForwardingFunction = (props2, ref) => {\n const context = useContext34(SharedAudioContext);\n if (!context) {\n throw new Error(\"SharedAudioContext not found\");\n }\n const videoRef = useRef19(null);\n const sharedSource = useMemo31(() => {\n if (!context.audioContext) {\n return null;\n }\n return makeSharedElementSourceNode({\n audioContext: context.audioContext,\n ref: videoRef\n });\n }, [context.audioContext]);\n const effectToUse = React30.useInsertionEffect ?? React30.useLayoutEffect;\n effectToUse(() => {\n return () => {\n requestAnimationFrame(() => {\n sharedSource?.cleanup();\n });\n };\n }, [sharedSource]);\n const {\n volume,\n muted,\n playbackRate,\n onlyWarnForMediaSeekingError,\n src,\n onDuration,\n acceptableTimeShift,\n acceptableTimeShiftInSeconds,\n toneFrequency,\n name,\n _remotionInternalNativeLoopPassed,\n _remotionInternalStack,\n style,\n pauseWhenBuffering,\n showInTimeline,\n loopVolumeCurveBehavior,\n onError,\n onAutoPlayError,\n onVideoFrame,\n crossOrigin,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n allowAmplificationDuringRender,\n useWebAudioApi,\n audioStreamIndex,\n ...nativeProps\n } = props2;\n const _propsValid = true;\n if (!_propsValid) {\n throw new Error(\"typecheck error\");\n }\n const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? \"repeat\");\n const { fps, durationInFrames } = useVideoConfig();\n const parentSequence = useContext34(SequenceContext);\n const { hidden } = useContext34(SequenceVisibilityToggleContext);\n const logLevel = useLogLevel();\n const mountTime = useMountTime();\n const [timelineId] = useState18(() => String(Math.random()));\n const isSequenceHidden = hidden[timelineId] ?? 
false;\n if (typeof acceptableTimeShift !== \"undefined\") {\n throw new Error(\"acceptableTimeShift has been removed. Use acceptableTimeShiftInSeconds instead.\");\n }\n const [mediaVolume] = useMediaVolumeState();\n const [mediaMuted] = useMediaMutedState();\n const userPreferredVolume = evaluateVolume({\n frame: volumePropFrame,\n volume,\n mediaVolume\n });\n warnAboutTooHighVolume(userPreferredVolume);\n useMediaInTimeline({\n volume,\n mediaVolume,\n mediaType: \"video\",\n src,\n playbackRate: props2.playbackRate ?? 1,\n displayName: name ?? null,\n id: timelineId,\n stack: _remotionInternalStack,\n showInTimeline,\n premountDisplay: parentSequence?.premountDisplay ?? null,\n postmountDisplay: parentSequence?.postmountDisplay ?? null,\n loopDisplay: undefined\n });\n useMediaPlayback({\n mediaRef: videoRef,\n src,\n mediaType: \"video\",\n playbackRate: props2.playbackRate ?? 1,\n onlyWarnForMediaSeekingError,\n acceptableTimeshift: acceptableTimeShiftInSeconds ?? null,\n isPremounting: Boolean(parentSequence?.premounting),\n isPostmounting: Boolean(parentSequence?.postmounting),\n pauseWhenBuffering,\n onAutoPlayError: onAutoPlayError ?? null\n });\n useMediaTag({\n id: timelineId,\n isPostmounting: Boolean(parentSequence?.postmounting),\n isPremounting: Boolean(parentSequence?.premounting),\n mediaRef: videoRef,\n mediaType: \"video\",\n onAutoPlayError: onAutoPlayError ?? null\n });\n useVolume({\n logLevel,\n mediaRef: videoRef,\n volume: userPreferredVolume,\n source: sharedSource,\n shouldUseWebAudioApi: useWebAudioApi ?? false\n });\n const actualFrom = parentSequence ? parentSequence.relativeFrom : 0;\n const duration = parentSequence ? 
Math.min(parentSequence.durationInFrames, durationInFrames) : durationInFrames;\n const preloadedSrc = usePreload(src);\n const actualSrc = useAppendVideoFragment({\n actualSrc: preloadedSrc,\n actualFrom,\n duration,\n fps\n });\n useImperativeHandle9(ref, () => {\n return videoRef.current;\n }, []);\n useState18(() => playbackLogging({\n logLevel,\n message: `Mounting video with source = ${actualSrc}, v=${VERSION}, user agent=${typeof navigator === \"undefined\" ? \"server\" : navigator.userAgent}`,\n tag: \"video\",\n mountTime\n }));\n useEffect16(() => {\n const { current } = videoRef;\n if (!current) {\n return;\n }\n const errorHandler = () => {\n if (current.error) {\n console.error(\"Error occurred in video\", current?.error);\n if (onError) {\n const err = new Error(`Code ${current.error.code}: ${current.error.message}`);\n onError(err);\n return;\n }\n throw new Error(`The browser threw an error while playing the video ${src}: Code ${current.error.code} - ${current?.error?.message}. See https://remotion.dev/docs/media-playback-error for help. 
Pass an onError() prop to handle the error.`);\n } else {\n if (onError) {\n const err = new Error(`The browser threw an error while playing the video ${src}`);\n onError(err);\n return;\n }\n throw new Error(\"The browser threw an error while playing the video\");\n }\n };\n current.addEventListener(\"error\", errorHandler, { once: true });\n return () => {\n current.removeEventListener(\"error\", errorHandler);\n };\n }, [onError, src]);\n const currentOnDurationCallback = useRef19(onDuration);\n currentOnDurationCallback.current = onDuration;\n useEmitVideoFrame({ ref: videoRef, onVideoFrame });\n useEffect16(() => {\n const { current } = videoRef;\n if (!current) {\n return;\n }\n if (current.duration) {\n currentOnDurationCallback.current?.(src, current.duration);\n return;\n }\n const onLoadedMetadata = () => {\n currentOnDurationCallback.current?.(src, current.duration);\n };\n current.addEventListener(\"loadedmetadata\", onLoadedMetadata);\n return () => {\n current.removeEventListener(\"loadedmetadata\", onLoadedMetadata);\n };\n }, [src]);\n useEffect16(() => {\n const { current } = videoRef;\n if (!current) {\n return;\n }\n if (isIosSafari()) {\n current.preload = \"metadata\";\n } else {\n current.preload = \"auto\";\n }\n }, []);\n const actualStyle = useMemo31(() => {\n return {\n ...style,\n opacity: isSequenceHidden ? 0 : style?.opacity ?? 
1\n };\n }, [isSequenceHidden, style]);\n const crossOriginValue = getCrossOriginValue({\n crossOrigin,\n requestsVideoFrame: Boolean(onVideoFrame),\n isClientSideRendering: false\n });\n return /* @__PURE__ */ jsx29(\"video\", {\n ref: videoRef,\n muted: muted || mediaMuted || isSequenceHidden || userPreferredVolume <= 0,\n playsInline: true,\n src: actualSrc,\n loop: _remotionInternalNativeLoopPassed,\n style: actualStyle,\n disableRemotePlayback: true,\n crossOrigin: crossOriginValue,\n ...nativeProps\n });\n};\nvar VideoForPreview = forwardRef9(VideoForDevelopmentRefForwardingFunction);\n\n// src/video/OffthreadVideo.tsx\nimport { jsx as jsx30 } from \"react/jsx-runtime\";\nvar InnerOffthreadVideo = (props2) => {\n const {\n startFrom,\n endAt,\n trimBefore,\n trimAfter,\n name,\n pauseWhenBuffering,\n stack,\n showInTimeline,\n ...otherProps\n } = props2;\n const environment = useRemotionEnvironment();\n if (environment.isClientSideRendering) {\n throw new Error(\"<OffthreadVideo> is not supported in @remotion/web-renderer. Use <Video> from @remotion/media instead. See https://remotion.dev/docs/client-side-rendering/limitations\");\n }\n const onDuration = useCallback16(() => {\n return;\n }, []);\n if (typeof props2.src !== \"string\") {\n throw new TypeError(`The \\`<OffthreadVideo>\\` tag requires a string for \\`src\\`, but got ${JSON.stringify(props2.src)} instead.`);\n }\n validateMediaTrimProps({ startFrom, endAt, trimBefore, trimAfter });\n const { trimBeforeValue, trimAfterValue } = resolveTrimProps({\n startFrom,\n endAt,\n trimBefore,\n trimAfter\n });\n if (typeof trimBeforeValue !== \"undefined\" || typeof trimAfterValue !== \"undefined\") {\n return /* @__PURE__ */ jsx30(Sequence, {\n layout: \"none\",\n from: 0 - (trimBeforeValue ?? 0),\n showInTimeline: false,\n durationInFrames: trimAfterValue,\n name,\n children: /* @__PURE__ */ jsx30(InnerOffthreadVideo, {\n pauseWhenBuffering: pauseWhenBuffering ?? 
false,\n ...otherProps,\n trimAfter: undefined,\n name: undefined,\n showInTimeline,\n trimBefore: undefined,\n stack: undefined,\n startFrom: undefined,\n endAt: undefined\n })\n });\n }\n validateMediaProps(props2, \"Video\");\n if (environment.isRendering) {\n return /* @__PURE__ */ jsx30(OffthreadVideoForRendering, {\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n ...otherProps,\n trimAfter: undefined,\n name: undefined,\n showInTimeline,\n trimBefore: undefined,\n stack: undefined,\n startFrom: undefined,\n endAt: undefined\n });\n }\n const {\n transparent,\n toneMapped,\n onAutoPlayError,\n onVideoFrame,\n crossOrigin,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n ...propsForPreview\n } = otherProps;\n return /* @__PURE__ */ jsx30(VideoForPreview, {\n _remotionInternalStack: stack ?? null,\n onDuration,\n onlyWarnForMediaSeekingError: true,\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n showInTimeline: showInTimeline ?? true,\n onAutoPlayError: onAutoPlayError ?? undefined,\n onVideoFrame: onVideoFrame ?? null,\n crossOrigin,\n ...propsForPreview,\n _remotionInternalNativeLoopPassed: false\n });\n};\nvar OffthreadVideo = ({\n src,\n acceptableTimeShiftInSeconds,\n allowAmplificationDuringRender,\n audioStreamIndex,\n className,\n crossOrigin,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n id,\n loopVolumeCurveBehavior,\n muted,\n name,\n onAutoPlayError,\n onError,\n onVideoFrame,\n pauseWhenBuffering,\n playbackRate,\n showInTimeline,\n style,\n toneFrequency,\n toneMapped,\n transparent,\n trimAfter,\n trimBefore,\n useWebAudioApi,\n volume,\n _remotionInternalNativeLoopPassed,\n endAt,\n stack,\n startFrom,\n imageFormat\n}) => {\n if (imageFormat) {\n throw new TypeError(`The \\`<OffthreadVideo>\\` tag does no longer accept \\`imageFormat\\`. 
Use the \\`transparent\\` prop if you want to render a transparent video.`);\n }\n return /* @__PURE__ */ jsx30(InnerOffthreadVideo, {\n acceptableTimeShiftInSeconds,\n allowAmplificationDuringRender: allowAmplificationDuringRender ?? true,\n audioStreamIndex: audioStreamIndex ?? 0,\n className,\n crossOrigin,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n id,\n loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? \"repeat\",\n muted: muted ?? false,\n name,\n onAutoPlayError: onAutoPlayError ?? null,\n onError,\n onVideoFrame,\n pauseWhenBuffering: pauseWhenBuffering ?? true,\n playbackRate: playbackRate ?? 1,\n toneFrequency: toneFrequency ?? 1,\n showInTimeline: showInTimeline ?? true,\n src,\n stack,\n startFrom,\n _remotionInternalNativeLoopPassed: _remotionInternalNativeLoopPassed ?? false,\n endAt,\n style,\n toneMapped: toneMapped ?? true,\n transparent: transparent ?? false,\n trimAfter,\n trimBefore,\n useWebAudioApi: useWebAudioApi ?? false,\n volume\n });\n};\naddSequenceStackTraces(OffthreadVideo);\n\n// src/watch-static-file.ts\nvar WATCH_REMOTION_STATIC_FILES = \"remotion_staticFilesChanged\";\nvar watchStaticFile = (fileName, callback) => {\n if (ENABLE_V5_BREAKING_CHANGES) {\n throw new Error(\"watchStaticFile() has moved into the `@remotion/studio` package. Update your imports.\");\n }\n if (!getRemotionEnvironment().isStudio) {\n console.warn(\"The watchStaticFile() API is only available while using the Remotion Studio.\");\n return { cancel: () => {\n return;\n } };\n }\n const withoutStaticBase = fileName.startsWith(window.remotion_staticBase) ? fileName.replace(window.remotion_staticBase, \"\") : fileName;\n const withoutLeadingSlash = withoutStaticBase.startsWith(\"/\") ? 
withoutStaticBase.slice(1) : withoutStaticBase;\n let prevFileData = window.remotion_staticFiles.find((file) => file.name === withoutLeadingSlash);\n const checkFile = (event) => {\n const staticFiles = event.detail.files;\n const newFileData = staticFiles.find((file) => file.name === withoutLeadingSlash);\n if (!newFileData) {\n if (prevFileData !== undefined) {\n callback(null);\n }\n prevFileData = undefined;\n return;\n }\n if (prevFileData === undefined || prevFileData.lastModified !== newFileData.lastModified) {\n callback(newFileData);\n prevFileData = newFileData;\n }\n };\n window.addEventListener(WATCH_REMOTION_STATIC_FILES, checkFile);\n const cancel = () => {\n return window.removeEventListener(WATCH_REMOTION_STATIC_FILES, checkFile);\n };\n return { cancel };\n};\n\n// src/wrap-in-schema.ts\nimport React32, { forwardRef as forwardRef10, useMemo as useMemo32 } from \"react\";\nvar getNestedValue = (obj, key) => {\n const parts = key.split(\".\");\n let current = obj;\n for (const part of parts) {\n if (current === null || current === undefined || typeof current !== \"object\")\n return;\n current = current[part];\n }\n return current;\n};\nvar mergeValues = (props2, values, schemaKeys) => {\n const merged = { ...props2 };\n for (const key of schemaKeys) {\n const value = values[key];\n const parts = key.split(\".\");\n if (parts.length === 1) {\n merged[key] = value;\n continue;\n }\n let current = merged;\n for (let i = 0;i < parts.length - 1; i++) {\n const part = parts[i];\n if (typeof current[part] === \"object\" && current[part] !== null) {\n current[part] = { ...current[part] };\n } else {\n current[part] = {};\n }\n current = current[part];\n }\n current[parts[parts.length - 1]] = value;\n }\n return merged;\n};\nvar wrapInSchema = (Component, schema) => {\n const schemaKeys = Object.keys(schema);\n const Wrapped = forwardRef10((props2, ref) => {\n const env = useRemotionEnvironment();\n if (!env.isStudio || env.isReadOnlyStudio || 
env.isRendering || !process.env.EXPERIMENTAL_VISUAL_MODE_ENABLED) {\n return React32.createElement(Component, {\n ...props2,\n controls: null,\n ref\n });\n }\n const schemaInput = useMemo32(() => {\n const input = {};\n for (const key of schemaKeys) {\n input[key] = getNestedValue(props2, key);\n }\n return input;\n }, schemaKeys.map((key) => getNestedValue(props2, key)));\n const { controls, values } = useSchema(schema, schemaInput);\n const mergedProps = mergeValues(props2, values, schemaKeys);\n return React32.createElement(Component, {\n ...mergedProps,\n controls,\n ref\n });\n });\n Wrapped.displayName = `wrapInSchema(${Component.displayName || Component.name || \"Component\"})`;\n return Wrapped;\n};\n\n// src/wrap-remotion-context.tsx\nimport React33, { useMemo as useMemo33 } from \"react\";\nimport { jsx as jsx31 } from \"react/jsx-runtime\";\nfunction useRemotionContexts() {\n const compositionManagerCtx = React33.useContext(CompositionManager);\n const timelineContext = React33.useContext(TimelineContext);\n const setTimelineContext = React33.useContext(SetTimelineContext);\n const sequenceContext = React33.useContext(SequenceContext);\n const nonceContext = React33.useContext(NonceContext);\n const canUseRemotionHooksContext = React33.useContext(CanUseRemotionHooks);\n const preloadContext = React33.useContext(PreloadContext);\n const resolveCompositionContext = React33.useContext(ResolveCompositionContext);\n const renderAssetManagerContext = React33.useContext(RenderAssetManager);\n const sequenceManagerContext = React33.useContext(SequenceManager);\n const bufferManagerContext = React33.useContext(BufferingContextReact);\n const logLevelContext = React33.useContext(LogLevelContext);\n return useMemo33(() => ({\n compositionManagerCtx,\n timelineContext,\n setTimelineContext,\n sequenceContext,\n nonceContext,\n canUseRemotionHooksContext,\n preloadContext,\n resolveCompositionContext,\n renderAssetManagerContext,\n sequenceManagerContext,\n 
bufferManagerContext,\n logLevelContext\n }), [\n compositionManagerCtx,\n nonceContext,\n sequenceContext,\n setTimelineContext,\n timelineContext,\n canUseRemotionHooksContext,\n preloadContext,\n resolveCompositionContext,\n renderAssetManagerContext,\n sequenceManagerContext,\n bufferManagerContext,\n logLevelContext\n ]);\n}\nvar RemotionContextProvider = (props2) => {\n const { children, contexts } = props2;\n return /* @__PURE__ */ jsx31(LogLevelContext.Provider, {\n value: contexts.logLevelContext,\n children: /* @__PURE__ */ jsx31(CanUseRemotionHooks.Provider, {\n value: contexts.canUseRemotionHooksContext,\n children: /* @__PURE__ */ jsx31(NonceContext.Provider, {\n value: contexts.nonceContext,\n children: /* @__PURE__ */ jsx31(PreloadContext.Provider, {\n value: contexts.preloadContext,\n children: /* @__PURE__ */ jsx31(CompositionManager.Provider, {\n value: contexts.compositionManagerCtx,\n children: /* @__PURE__ */ jsx31(SequenceManager.Provider, {\n value: contexts.sequenceManagerContext,\n children: /* @__PURE__ */ jsx31(RenderAssetManager.Provider, {\n value: contexts.renderAssetManagerContext,\n children: /* @__PURE__ */ jsx31(ResolveCompositionContext.Provider, {\n value: contexts.resolveCompositionContext,\n children: /* @__PURE__ */ jsx31(TimelineContext.Provider, {\n value: contexts.timelineContext,\n children: /* @__PURE__ */ jsx31(SetTimelineContext.Provider, {\n value: contexts.setTimelineContext,\n children: /* @__PURE__ */ jsx31(SequenceContext.Provider, {\n value: contexts.sequenceContext,\n children: /* @__PURE__ */ jsx31(BufferingContextReact.Provider, {\n value: contexts.bufferManagerContext,\n children\n })\n })\n })\n })\n })\n })\n })\n })\n })\n })\n })\n });\n};\n\n// src/internals.ts\nvar compositionSelectorRef = createRef3();\nvar Internals = {\n MaxMediaCacheSizeContext,\n useUnsafeVideoConfig,\n useFrameForVolumeProp,\n useTimelinePosition,\n useAbsoluteTimelinePosition,\n evaluateVolume,\n getAbsoluteSrc,\n Timeline: 
exports_timeline_position_state,\n validateMediaTrimProps,\n validateMediaProps,\n resolveTrimProps,\n VideoForPreview,\n CompositionManager,\n CompositionSetters,\n VisualModeOverridesContext,\n SequenceManager,\n SequenceStackTracesUpdateContext,\n SequenceVisibilityToggleContext,\n useSchema,\n wrapInSchema,\n useSequenceControlOverride,\n RemotionRootContexts,\n CompositionManagerProvider,\n useVideo,\n getRoot,\n useMediaVolumeState,\n useMediaMutedState,\n useMediaInTimeline,\n useLazyComponent,\n truthy,\n SequenceContext,\n PremountContext,\n useRemotionContexts,\n RemotionContextProvider,\n CSSUtils: exports_default_css,\n setupEnvVariables,\n MediaVolumeContext,\n SetMediaVolumeContext,\n getRemotionEnvironment,\n SharedAudioContext,\n SharedAudioContextProvider,\n invalidCompositionErrorMessage,\n calculateMediaDuration,\n isCompositionIdValid,\n getPreviewDomElement,\n compositionsRef,\n portalNode,\n waitForRoot,\n SetTimelineContext,\n CanUseRemotionHooksProvider,\n CanUseRemotionHooks,\n PrefetchProvider,\n DurationsContextProvider,\n IsPlayerContextProvider,\n useIsPlayer,\n EditorPropsProvider,\n EditorPropsContext,\n usePreload,\n NonceContext,\n resolveVideoConfig,\n resolveVideoConfigOrCatch,\n ResolveCompositionContext,\n useResolvedVideoConfig,\n resolveCompositionsRef,\n REMOTION_STUDIO_CONTAINER_ELEMENT,\n RenderAssetManager,\n persistCurrentFrame,\n useTimelineContext,\n useTimelineSetFrame,\n isIosSafari,\n WATCH_REMOTION_STATIC_FILES,\n addSequenceStackTraces,\n useMediaStartsAt,\n BufferingProvider,\n BufferingContextReact,\n getComponentsToAddStacksTo,\n CurrentScaleContext,\n PreviewSizeContext,\n calculateScale,\n editorPropsProviderRef,\n PROPS_UPDATED_EXTERNALLY,\n validateRenderAsset,\n Log,\n LogLevelContext,\n useLogLevel,\n playbackLogging,\n timeValueRef,\n compositionSelectorRef,\n RemotionEnvironmentContext,\n warnAboutTooHighVolume,\n AudioForPreview,\n OBJECTFIT_CONTAIN_CLASS_NAME,\n InnerOffthreadVideo,\n 
useBasicMediaInTimeline,\n getInputPropsOverride,\n setInputPropsOverride,\n useVideoEnabled,\n useAudioEnabled,\n useIsPlayerBuffering,\n TimelinePosition: exports_timeline_position_state,\n DelayRenderContextType,\n TimelineContext,\n AbsoluteTimeContext,\n RenderAssetManagerProvider,\n getEffectiveVisualModeValue\n};\n// src/interpolate-colors.ts\nvar NUMBER = \"[-+]?\\\\d*\\\\.?\\\\d+\";\nvar PERCENTAGE = NUMBER + \"%\";\nfunction call(...args) {\n return \"\\\\(\\\\s*(\" + args.join(\")\\\\s*,\\\\s*(\") + \")\\\\s*\\\\)\";\n}\nfunction getMatchers() {\n const cachedMatchers = {\n rgb: undefined,\n rgba: undefined,\n hsl: undefined,\n hsla: undefined,\n hex3: undefined,\n hex4: undefined,\n hex5: undefined,\n hex6: undefined,\n hex8: undefined\n };\n if (cachedMatchers.rgb === undefined) {\n cachedMatchers.rgb = new RegExp(\"rgb\" + call(NUMBER, NUMBER, NUMBER));\n cachedMatchers.rgba = new RegExp(\"rgba\" + call(NUMBER, NUMBER, NUMBER, NUMBER));\n cachedMatchers.hsl = new RegExp(\"hsl\" + call(NUMBER, PERCENTAGE, PERCENTAGE));\n cachedMatchers.hsla = new RegExp(\"hsla\" + call(NUMBER, PERCENTAGE, PERCENTAGE, NUMBER));\n cachedMatchers.hex3 = /^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/;\n cachedMatchers.hex4 = /^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/;\n cachedMatchers.hex6 = /^#([0-9a-fA-F]{6})$/;\n cachedMatchers.hex8 = /^#([0-9a-fA-F]{8})$/;\n }\n return cachedMatchers;\n}\nfunction hue2rgb(p, q, t) {\n if (t < 0) {\n t += 1;\n }\n if (t > 1) {\n t -= 1;\n }\n if (t < 1 / 6) {\n return p + (q - p) * 6 * t;\n }\n if (t < 1 / 2) {\n return q;\n }\n if (t < 2 / 3) {\n return p + (q - p) * (2 / 3 - t) * 6;\n }\n return p;\n}\nfunction hslToRgb(h, s, l) {\n const q = l < 0.5 ? 
l * (1 + s) : l + s - l * s;\n const p = 2 * l - q;\n const r = hue2rgb(p, q, h + 1 / 3);\n const g = hue2rgb(p, q, h);\n const b2 = hue2rgb(p, q, h - 1 / 3);\n return Math.round(r * 255) << 24 | Math.round(g * 255) << 16 | Math.round(b2 * 255) << 8;\n}\nfunction parse255(str) {\n const int = Number.parseInt(str, 10);\n if (int < 0) {\n return 0;\n }\n if (int > 255) {\n return 255;\n }\n return int;\n}\nfunction parse360(str) {\n const int = Number.parseFloat(str);\n return (int % 360 + 360) % 360 / 360;\n}\nfunction parse1(str) {\n const num = Number.parseFloat(str);\n if (num < 0) {\n return 0;\n }\n if (num > 1) {\n return 255;\n }\n return Math.round(num * 255);\n}\nfunction parsePercentage(str) {\n const int = Number.parseFloat(str);\n if (int < 0) {\n return 0;\n }\n if (int > 100) {\n return 1;\n }\n return int / 100;\n}\nvar colorNames = {\n transparent: 0,\n aliceblue: 4042850303,\n antiquewhite: 4209760255,\n aqua: 16777215,\n aquamarine: 2147472639,\n azure: 4043309055,\n beige: 4126530815,\n bisque: 4293182719,\n black: 255,\n blanchedalmond: 4293643775,\n blue: 65535,\n blueviolet: 2318131967,\n brown: 2771004159,\n burlywood: 3736635391,\n burntsienna: 3934150143,\n cadetblue: 1604231423,\n chartreuse: 2147418367,\n chocolate: 3530104575,\n coral: 4286533887,\n cornflowerblue: 1687547391,\n cornsilk: 4294499583,\n crimson: 3692313855,\n cyan: 16777215,\n darkblue: 35839,\n darkcyan: 9145343,\n darkgoldenrod: 3095792639,\n darkgray: 2846468607,\n darkgreen: 6553855,\n darkgrey: 2846468607,\n darkkhaki: 3182914559,\n darkmagenta: 2332068863,\n darkolivegreen: 1433087999,\n darkorange: 4287365375,\n darkorchid: 2570243327,\n darkred: 2332033279,\n darksalmon: 3918953215,\n darkseagreen: 2411499519,\n darkslateblue: 1211993087,\n darkslategray: 793726975,\n darkslategrey: 793726975,\n darkturquoise: 13554175,\n darkviolet: 2483082239,\n deeppink: 4279538687,\n deepskyblue: 12582911,\n dimgray: 1768516095,\n dimgrey: 1768516095,\n dodgerblue: 512819199,\n 
firebrick: 2988581631,\n floralwhite: 4294635775,\n forestgreen: 579543807,\n fuchsia: 4278255615,\n gainsboro: 3705462015,\n ghostwhite: 4177068031,\n gold: 4292280575,\n goldenrod: 3668254975,\n gray: 2155905279,\n green: 8388863,\n greenyellow: 2919182335,\n grey: 2155905279,\n honeydew: 4043305215,\n hotpink: 4285117695,\n indianred: 3445382399,\n indigo: 1258324735,\n ivory: 4294963455,\n khaki: 4041641215,\n lavender: 3873897215,\n lavenderblush: 4293981695,\n lawngreen: 2096890111,\n lemonchiffon: 4294626815,\n lightblue: 2916673279,\n lightcoral: 4034953471,\n lightcyan: 3774873599,\n lightgoldenrodyellow: 4210742015,\n lightgray: 3553874943,\n lightgreen: 2431553791,\n lightgrey: 3553874943,\n lightpink: 4290167295,\n lightsalmon: 4288707327,\n lightseagreen: 548580095,\n lightskyblue: 2278488831,\n lightslategray: 2005441023,\n lightslategrey: 2005441023,\n lightsteelblue: 2965692159,\n lightyellow: 4294959359,\n lime: 16711935,\n limegreen: 852308735,\n linen: 4210091775,\n magenta: 4278255615,\n maroon: 2147483903,\n mediumaquamarine: 1724754687,\n mediumblue: 52735,\n mediumorchid: 3126187007,\n mediumpurple: 2473647103,\n mediumseagreen: 1018393087,\n mediumslateblue: 2070474495,\n mediumspringgreen: 16423679,\n mediumturquoise: 1221709055,\n mediumvioletred: 3340076543,\n midnightblue: 421097727,\n mintcream: 4127193855,\n mistyrose: 4293190143,\n moccasin: 4293178879,\n navajowhite: 4292783615,\n navy: 33023,\n oldlace: 4260751103,\n olive: 2155872511,\n olivedrab: 1804477439,\n orange: 4289003775,\n orangered: 4282712319,\n orchid: 3664828159,\n palegoldenrod: 4008225535,\n palegreen: 2566625535,\n paleturquoise: 2951671551,\n palevioletred: 3681588223,\n papayawhip: 4293907967,\n peachpuff: 4292524543,\n peru: 3448061951,\n pink: 4290825215,\n plum: 3718307327,\n powderblue: 2967529215,\n purple: 2147516671,\n rebeccapurple: 1714657791,\n red: 4278190335,\n rosybrown: 3163525119,\n royalblue: 1097458175,\n saddlebrown: 2336560127,\n salmon: 
4202722047,\n sandybrown: 4104413439,\n seagreen: 780883967,\n seashell: 4294307583,\n sienna: 2689740287,\n silver: 3233857791,\n skyblue: 2278484991,\n slateblue: 1784335871,\n slategray: 1887473919,\n slategrey: 1887473919,\n snow: 4294638335,\n springgreen: 16744447,\n steelblue: 1182971135,\n tan: 3535047935,\n teal: 8421631,\n thistle: 3636451583,\n tomato: 4284696575,\n turquoise: 1088475391,\n violet: 4001558271,\n wheat: 4125012991,\n white: 4294967295,\n whitesmoke: 4126537215,\n yellow: 4294902015,\n yellowgreen: 2597139199\n};\nfunction normalizeColor(color) {\n const matchers = getMatchers();\n let match;\n if (matchers.hex6) {\n if (match = matchers.hex6.exec(color)) {\n return Number.parseInt(match[1] + \"ff\", 16) >>> 0;\n }\n }\n if (colorNames[color] !== undefined) {\n return colorNames[color];\n }\n if (matchers.rgb) {\n if (match = matchers.rgb.exec(color)) {\n return (parse255(match[1]) << 24 | parse255(match[2]) << 16 | parse255(match[3]) << 8 | 255) >>> 0;\n }\n }\n if (matchers.rgba) {\n if (match = matchers.rgba.exec(color)) {\n return (parse255(match[1]) << 24 | parse255(match[2]) << 16 | parse255(match[3]) << 8 | parse1(match[4])) >>> 0;\n }\n }\n if (matchers.hex3) {\n if (match = matchers.hex3.exec(color)) {\n return Number.parseInt(match[1] + match[1] + match[2] + match[2] + match[3] + match[3] + \"ff\", 16) >>> 0;\n }\n }\n if (matchers.hex8) {\n if (match = matchers.hex8.exec(color)) {\n return Number.parseInt(match[1], 16) >>> 0;\n }\n }\n if (matchers.hex4) {\n if (match = matchers.hex4.exec(color)) {\n return Number.parseInt(match[1] + match[1] + match[2] + match[2] + match[3] + match[3] + match[4] + match[4], 16) >>> 0;\n }\n }\n if (matchers.hsl) {\n if (match = matchers.hsl.exec(color)) {\n return (hslToRgb(parse360(match[1]), parsePercentage(match[2]), parsePercentage(match[3])) | 255) >>> 0;\n }\n }\n if (matchers.hsla) {\n if (match = matchers.hsla.exec(color)) {\n return (hslToRgb(parse360(match[1]), 
parsePercentage(match[2]), parsePercentage(match[3])) | parse1(match[4])) >>> 0;\n }\n }\n throw new Error(`invalid color string ${color} provided`);\n}\nvar opacity = (c2) => {\n return (c2 >> 24 & 255) / 255;\n};\nvar red = (c2) => {\n return c2 >> 16 & 255;\n};\nvar green = (c2) => {\n return c2 >> 8 & 255;\n};\nvar blue = (c2) => {\n return c2 & 255;\n};\nvar rgbaColor = (r, g, b2, alpha) => {\n return `rgba(${r}, ${g}, ${b2}, ${alpha})`;\n};\nfunction processColor(color) {\n const normalizedColor = normalizeColor(color);\n return (normalizedColor << 24 | normalizedColor >>> 8) >>> 0;\n}\nvar interpolateColorsRGB = (value, inputRange, colors) => {\n const [r, g, b2, a2] = [red, green, blue, opacity].map((f) => {\n const unrounded = interpolate(value, inputRange, colors.map((c2) => f(c2)), {\n extrapolateLeft: \"clamp\",\n extrapolateRight: \"clamp\"\n });\n if (f === opacity) {\n return Number(unrounded.toFixed(3));\n }\n return Math.round(unrounded);\n });\n return rgbaColor(r, g, b2, a2);\n};\nvar interpolateColors = (input, inputRange, outputRange) => {\n if (typeof input === \"undefined\") {\n throw new TypeError(\"input can not be undefined\");\n }\n if (typeof inputRange === \"undefined\") {\n throw new TypeError(\"inputRange can not be undefined\");\n }\n if (typeof outputRange === \"undefined\") {\n throw new TypeError(\"outputRange can not be undefined\");\n }\n if (inputRange.length !== outputRange.length) {\n throw new TypeError(\"inputRange (\" + inputRange.length + \" values provided) and outputRange (\" + outputRange.length + \" values provided) must have the same length\");\n }\n const processedOutputRange = outputRange.map((c2) => processColor(c2));\n return interpolateColorsRGB(input, inputRange, processedOutputRange);\n};\n// src/validate-frame.ts\nvar validateFrame = ({\n allowFloats,\n durationInFrames,\n frame\n}) => {\n if (typeof frame === \"undefined\") {\n throw new TypeError(`Argument missing for parameter \"frame\"`);\n }\n if (typeof 
frame !== \"number\") {\n throw new TypeError(`Argument passed for \"frame\" is not a number: ${frame}`);\n }\n if (!Number.isFinite(frame)) {\n throw new RangeError(`Frame ${frame} is not finite`);\n }\n if (frame % 1 !== 0 && !allowFloats) {\n throw new RangeError(`Argument for frame must be an integer, but got ${frame}`);\n }\n if (frame < 0 && frame < -durationInFrames) {\n throw new RangeError(`Cannot use frame ${frame}: Duration of composition is ${durationInFrames}, therefore the lowest frame that can be rendered is ${-durationInFrames}`);\n }\n if (frame > durationInFrames - 1) {\n throw new RangeError(`Cannot use frame ${frame}: Duration of composition is ${durationInFrames}, therefore the highest frame that can be rendered is ${durationInFrames - 1}`);\n }\n};\n// src/series/index.tsx\nimport { Children, forwardRef as forwardRef11, useMemo as useMemo34 } from \"react\";\n\n// src/series/flatten-children.tsx\nimport React34 from \"react\";\nvar flattenChildren = (children) => {\n const childrenArray = React34.Children.toArray(children);\n return childrenArray.reduce((flatChildren, child) => {\n if (child.type === React34.Fragment) {\n return flatChildren.concat(flattenChildren(child.props.children));\n }\n flatChildren.push(child);\n return flatChildren;\n }, []);\n};\n\n// src/series/is-inside-series.tsx\nimport React35, { createContext as createContext22 } from \"react\";\nimport { jsx as jsx32 } from \"react/jsx-runtime\";\nvar IsInsideSeriesContext = createContext22(false);\nvar IsInsideSeriesContainer = ({ children }) => {\n return /* @__PURE__ */ jsx32(IsInsideSeriesContext.Provider, {\n value: true,\n children\n });\n};\nvar IsNotInsideSeriesProvider = ({ children }) => {\n return /* @__PURE__ */ jsx32(IsInsideSeriesContext.Provider, {\n value: false,\n children\n });\n};\nvar useRequireToBeInsideSeries = () => {\n const isInsideSeries = React35.useContext(IsInsideSeriesContext);\n if (!isInsideSeries) {\n throw new Error(\"This component must be 
inside a <Series /> component.\");\n }\n};\n\n// src/series/index.tsx\nimport { jsx as jsx33 } from \"react/jsx-runtime\";\nvar SeriesSequenceRefForwardingFunction = ({ children }, _ref) => {\n useRequireToBeInsideSeries();\n return /* @__PURE__ */ jsx33(IsNotInsideSeriesProvider, {\n children\n });\n};\nvar SeriesSequence = forwardRef11(SeriesSequenceRefForwardingFunction);\nvar Series = (props2) => {\n const childrenValue = useMemo34(() => {\n let startFrame = 0;\n const flattenedChildren = flattenChildren(props2.children);\n return Children.map(flattenedChildren, (child, i) => {\n const castedChild = child;\n if (typeof castedChild === \"string\") {\n if (castedChild.trim() === \"\") {\n return null;\n }\n throw new TypeError(`The <Series /> component only accepts a list of <Series.Sequence /> components as its children, but you passed a string \"${castedChild}\"`);\n }\n if (castedChild.type !== SeriesSequence) {\n throw new TypeError(`The <Series /> component only accepts a list of <Series.Sequence /> components as its children, but got ${castedChild} instead`);\n }\n const debugInfo = `index = ${i}, duration = ${castedChild.props.durationInFrames}`;\n if (!castedChild?.props.children) {\n throw new TypeError(`A <Series.Sequence /> component (${debugInfo}) was detected to not have any children. Delete it to fix this error.`);\n }\n const durationInFramesProp = castedChild.props.durationInFrames;\n const {\n durationInFrames,\n children: _children,\n from,\n name,\n ...passedProps\n } = castedChild.props;\n if (i !== flattenedChildren.length - 1 || durationInFramesProp !== Infinity) {\n validateDurationInFrames(durationInFramesProp, {\n component: `of a <Series.Sequence /> component`,\n allowFloats: true\n });\n }\n const offset = castedChild.props.offset ?? 
0;\n if (Number.isNaN(offset)) {\n throw new TypeError(`The \"offset\" property of a <Series.Sequence /> must not be NaN, but got NaN (${debugInfo}).`);\n }\n if (!Number.isFinite(offset)) {\n throw new TypeError(`The \"offset\" property of a <Series.Sequence /> must be finite, but got ${offset} (${debugInfo}).`);\n }\n if (offset % 1 !== 0) {\n throw new TypeError(`The \"offset\" property of a <Series.Sequence /> must be finite, but got ${offset} (${debugInfo}).`);\n }\n const currentStartFrame = startFrame + offset;\n startFrame += durationInFramesProp + offset;\n return /* @__PURE__ */ jsx33(Sequence, {\n name: name || \"<Series.Sequence>\",\n from: currentStartFrame,\n durationInFrames: durationInFramesProp,\n ...passedProps,\n ref: castedChild.ref,\n children: child\n });\n });\n }, [props2.children]);\n if (ENABLE_V5_BREAKING_CHANGES) {\n return /* @__PURE__ */ jsx33(IsInsideSeriesContainer, {\n children: /* @__PURE__ */ jsx33(Sequence, {\n ...props2,\n children: childrenValue\n })\n });\n }\n return /* @__PURE__ */ jsx33(IsInsideSeriesContainer, {\n children: childrenValue\n });\n};\nSeries.Sequence = SeriesSequence;\naddSequenceStackTraces(SeriesSequence);\n// src/validation/validation-spring-duration.ts\nvar validateSpringDuration = (dur) => {\n if (typeof dur === \"undefined\") {\n return;\n }\n if (typeof dur !== \"number\") {\n throw new TypeError(`A \"duration\" of a spring must be a \"number\" but is \"${typeof dur}\"`);\n }\n if (Number.isNaN(dur)) {\n throw new TypeError('A \"duration\" of a spring is NaN, which it must not be');\n }\n if (!Number.isFinite(dur)) {\n throw new TypeError('A \"duration\" of a spring must be finite, but is ' + dur);\n }\n if (dur <= 0) {\n throw new TypeError('A \"duration\" of a spring must be positive, but is ' + dur);\n }\n};\n\n// src/spring/spring-utils.ts\nvar defaultSpringConfig = {\n damping: 10,\n mass: 1,\n stiffness: 100,\n overshootClamping: false\n};\nvar advanceCache = {};\nfunction advance({\n 
animation,\n now,\n config\n}) {\n const { toValue, lastTimestamp, current, velocity } = animation;\n const deltaTime = Math.min(now - lastTimestamp, 64);\n if (config.damping <= 0) {\n throw new Error(\"Spring damping must be greater than 0, otherwise the spring() animation will never end, causing an infinite loop.\");\n }\n const c2 = config.damping;\n const m = config.mass;\n const k = config.stiffness;\n const cacheKey = [\n toValue,\n lastTimestamp,\n current,\n velocity,\n c2,\n m,\n k,\n now\n ].join(\"-\");\n if (advanceCache[cacheKey]) {\n return advanceCache[cacheKey];\n }\n const v0 = -velocity;\n const x0 = toValue - current;\n const zeta = c2 / (2 * Math.sqrt(k * m));\n const omega0 = Math.sqrt(k / m);\n const omega1 = omega0 * Math.sqrt(1 - zeta ** 2);\n const t = deltaTime / 1000;\n const sin1 = Math.sin(omega1 * t);\n const cos1 = Math.cos(omega1 * t);\n const underDampedEnvelope = Math.exp(-zeta * omega0 * t);\n const underDampedFrag1 = underDampedEnvelope * (sin1 * ((v0 + zeta * omega0 * x0) / omega1) + x0 * cos1);\n const underDampedPosition = toValue - underDampedFrag1;\n const underDampedVelocity = zeta * omega0 * underDampedFrag1 - underDampedEnvelope * (cos1 * (v0 + zeta * omega0 * x0) - omega1 * x0 * sin1);\n const criticallyDampedEnvelope = Math.exp(-omega0 * t);\n const criticallyDampedPosition = toValue - criticallyDampedEnvelope * (x0 + (v0 + omega0 * x0) * t);\n const criticallyDampedVelocity = criticallyDampedEnvelope * (v0 * (t * omega0 - 1) + t * x0 * omega0 * omega0);\n const animationNode = {\n toValue,\n prevPosition: current,\n lastTimestamp: now,\n current: zeta < 1 ? underDampedPosition : criticallyDampedPosition,\n velocity: zeta < 1 ? 
underDampedVelocity : criticallyDampedVelocity\n };\n advanceCache[cacheKey] = animationNode;\n return animationNode;\n}\nvar calculationCache = {};\nfunction springCalculation({\n frame,\n fps,\n config = {}\n}) {\n const from = 0;\n const to = 1;\n const cacheKey = [\n frame,\n fps,\n config.damping,\n config.mass,\n config.overshootClamping,\n config.stiffness\n ].join(\"-\");\n if (calculationCache[cacheKey]) {\n return calculationCache[cacheKey];\n }\n let animation = {\n lastTimestamp: 0,\n current: from,\n toValue: to,\n velocity: 0,\n prevPosition: 0\n };\n const frameClamped = Math.max(0, frame);\n const unevenRest = frameClamped % 1;\n for (let f = 0;f <= Math.floor(frameClamped); f++) {\n if (f === Math.floor(frameClamped)) {\n f += unevenRest;\n }\n const time = f / fps * 1000;\n animation = advance({\n animation,\n now: time,\n config: {\n ...defaultSpringConfig,\n ...config\n }\n });\n }\n calculationCache[cacheKey] = animation;\n return animation;\n}\n\n// src/spring/measure-spring.ts\nvar cache = new Map;\nfunction measureSpring({\n fps,\n config = {},\n threshold = 0.005\n}) {\n if (typeof threshold !== \"number\") {\n throw new TypeError(`threshold must be a number, got ${threshold} of type ${typeof threshold}`);\n }\n if (threshold === 0) {\n return Infinity;\n }\n if (threshold === 1) {\n return 0;\n }\n if (isNaN(threshold)) {\n throw new TypeError(\"Threshold is NaN\");\n }\n if (!Number.isFinite(threshold)) {\n throw new TypeError(\"Threshold is not finite\");\n }\n if (threshold < 0) {\n throw new TypeError(\"Threshold is below 0\");\n }\n const cacheKey = [\n fps,\n config.damping,\n config.mass,\n config.overshootClamping,\n config.stiffness,\n threshold\n ].join(\"-\");\n if (cache.has(cacheKey)) {\n return cache.get(cacheKey);\n }\n validateFps(fps, \"to the measureSpring() function\", false);\n let frame = 0;\n let finishedFrame = 0;\n const calc = () => {\n return springCalculation({\n fps,\n frame,\n config\n });\n };\n let animation 
= calc();\n const calcDifference = () => {\n return Math.abs(animation.current - animation.toValue);\n };\n let difference = calcDifference();\n while (difference >= threshold) {\n frame++;\n animation = calc();\n difference = calcDifference();\n }\n finishedFrame = frame;\n for (let i = 0;i < 20; i++) {\n frame++;\n animation = calc();\n difference = calcDifference();\n if (difference >= threshold) {\n i = 0;\n finishedFrame = frame + 1;\n }\n }\n cache.set(cacheKey, finishedFrame);\n return finishedFrame;\n}\n\n// src/spring/index.ts\nfunction spring({\n frame: passedFrame,\n fps,\n config = {},\n from = 0,\n to = 1,\n durationInFrames: passedDurationInFrames,\n durationRestThreshold,\n delay = 0,\n reverse = false\n}) {\n validateSpringDuration(passedDurationInFrames);\n validateFrame({\n frame: passedFrame,\n durationInFrames: Infinity,\n allowFloats: true\n });\n validateFps(fps, \"to spring()\", false);\n const needsToCalculateNaturalDuration = reverse || typeof passedDurationInFrames !== \"undefined\";\n const naturalDuration = needsToCalculateNaturalDuration ? measureSpring({\n fps,\n config,\n threshold: durationRestThreshold\n }) : undefined;\n const naturalDurationGetter = needsToCalculateNaturalDuration ? {\n get: () => naturalDuration\n } : {\n get: () => {\n throw new Error(\"did not calculate natural duration, this is an error with Remotion. Please report\");\n }\n };\n const reverseProcessed = reverse ? (passedDurationInFrames ?? naturalDurationGetter.get()) - passedFrame : passedFrame;\n const delayProcessed = reverseProcessed + (reverse ? delay : -delay);\n const durationProcessed = passedDurationInFrames === undefined ? delayProcessed : delayProcessed / (passedDurationInFrames / naturalDurationGetter.get());\n if (passedDurationInFrames && delayProcessed > passedDurationInFrames) {\n return to;\n }\n const spr = springCalculation({\n fps,\n frame: durationProcessed,\n config\n });\n const inner = config.overshootClamping ? to >= from ? 
Math.min(spr.current, to) : Math.max(spr.current, to) : spr.current;\n const interpolated = from === 0 && to === 1 ? inner : interpolate(inner, [0, 1], [from, to]);\n return interpolated;\n}\n// src/static-file.ts\nvar problematicCharacters = {\n \"%3A\": \":\",\n \"%2F\": \"/\",\n \"%3F\": \"?\",\n \"%23\": \"#\",\n \"%5B\": \"[\",\n \"%5D\": \"]\",\n \"%40\": \"@\",\n \"%21\": \"!\",\n \"%24\": \"$\",\n \"%26\": \"&\",\n \"%27\": \"'\",\n \"%28\": \"(\",\n \"%29\": \")\",\n \"%2A\": \"*\",\n \"%2B\": \"+\",\n \"%2C\": \",\",\n \"%3B\": \";\"\n};\nvar didWarn2 = {};\nvar warnOnce3 = (message) => {\n if (didWarn2[message]) {\n return;\n }\n console.warn(message);\n didWarn2[message] = true;\n};\nvar includesHexOfUnsafeChar = (path) => {\n for (const key of Object.keys(problematicCharacters)) {\n if (path.includes(key)) {\n return { containsHex: true, hexCode: key };\n }\n }\n return { containsHex: false };\n};\nvar trimLeadingSlash = (path) => {\n if (path.startsWith(\"/\")) {\n return trimLeadingSlash(path.substring(1));\n }\n return path;\n};\nvar inner = (path) => {\n if (typeof window !== \"undefined\" && window.remotion_staticBase) {\n if (path.startsWith(window.remotion_staticBase)) {\n throw new Error(`The value \"${path}\" is already prefixed with the static base ${window.remotion_staticBase}. 
You don't need to call staticFile() on it.`);\n }\n return `${window.remotion_staticBase}/${trimLeadingSlash(path)}`;\n }\n return `/${trimLeadingSlash(path)}`;\n};\nvar encodeBySplitting = (path) => {\n const splitBySlash = path.split(\"/\");\n const encodedArray = splitBySlash.map((element) => {\n return encodeURIComponent(element);\n });\n const merged = encodedArray.join(\"/\");\n return merged;\n};\nvar staticFile = (path) => {\n if (path === null) {\n throw new TypeError(\"null was passed to staticFile()\");\n }\n if (typeof path === \"undefined\") {\n throw new TypeError(\"undefined was passed to staticFile()\");\n }\n if (path.startsWith(\"http://\") || path.startsWith(\"https://\")) {\n throw new TypeError(`staticFile() does not support remote URLs - got \"${path}\". Instead, pass the URL without wrapping it in staticFile(). See: https://remotion.dev/docs/staticfile-remote-urls`);\n }\n if (path.startsWith(\"..\") || path.startsWith(\"./\")) {\n throw new TypeError(`staticFile() does not support relative paths - got \"${path}\". Instead, pass the name of a file that is inside the public/ folder. See: https://remotion.dev/docs/staticfile-relative-paths`);\n }\n if (path.startsWith(\"/Users\") || path.startsWith(\"/home\") || path.startsWith(\"/tmp\") || path.startsWith(\"/etc\") || path.startsWith(\"/opt\") || path.startsWith(\"/var\") || path.startsWith(\"C:\") || path.startsWith(\"D:\") || path.startsWith(\"E:\")) {\n throw new TypeError(`staticFile() does not support absolute paths - got \"${path}\". Instead, pass the name of a file that is inside the public/ folder. See: https://remotion.dev/docs/staticfile-relative-paths`);\n }\n if (path.startsWith(\"public/\")) {\n throw new TypeError(`Do not include the public/ prefix when using staticFile() - got \"${path}\". 
See: https://remotion.dev/docs/staticfile-relative-paths`);\n }\n const includesHex = includesHexOfUnsafeChar(path);\n if (includesHex.containsHex) {\n warnOnce3(`WARNING: You seem to pass an already encoded path (path contains ${includesHex.hexCode}). Since Remotion 4.0, the encoding is done by staticFile() itself. You may want to remove a encodeURIComponent() wrapping.`);\n }\n const preprocessed = encodeBySplitting(path);\n const preparsed = inner(preprocessed);\n if (!preparsed.startsWith(\"/\")) {\n return `/${preparsed}`;\n }\n return preparsed;\n};\n// src/Still.tsx\nimport React37 from \"react\";\nvar Still = (props2) => {\n const newProps = {\n ...props2,\n durationInFrames: 1,\n fps: 1\n };\n return React37.createElement(Composition, newProps);\n};\n// src/video/Video.tsx\nimport { forwardRef as forwardRef13, useCallback as useCallback17, useContext as useContext36 } from \"react\";\n\n// src/video/VideoForRendering.tsx\nimport {\n forwardRef as forwardRef12,\n useContext as useContext35,\n useEffect as useEffect17,\n useImperativeHandle as useImperativeHandle10,\n useLayoutEffect as useLayoutEffect11,\n useMemo as useMemo35,\n useRef as useRef20\n} from \"react\";\n\n// src/video/seek-until-right.ts\nvar roundTo6Commas = (num) => {\n return Math.round(num * 1e5) / 1e5;\n};\nvar seekToTime = ({\n element,\n desiredTime,\n logLevel,\n mountTime\n}) => {\n if (isApproximatelyTheSame(element.currentTime, desiredTime)) {\n return {\n wait: Promise.resolve(desiredTime),\n cancel: () => {}\n };\n }\n seek({\n logLevel,\n mediaRef: element,\n time: desiredTime,\n why: \"Seeking during rendering\",\n mountTime\n });\n let cancel;\n let cancelSeeked = null;\n const prom = new Promise((resolve) => {\n cancel = element.requestVideoFrameCallback((now, metadata) => {\n const displayIn = metadata.expectedDisplayTime - now;\n if (displayIn <= 0) {\n resolve(metadata.mediaTime);\n return;\n }\n setTimeout(() => {\n resolve(metadata.mediaTime);\n }, displayIn + 150);\n 
});\n });\n const waitForSeekedEvent = new Promise((resolve) => {\n const onDone = () => {\n resolve();\n };\n element.addEventListener(\"seeked\", onDone, {\n once: true\n });\n cancelSeeked = () => {\n element.removeEventListener(\"seeked\", onDone);\n };\n });\n return {\n wait: Promise.all([prom, waitForSeekedEvent]).then(([time]) => time),\n cancel: () => {\n cancelSeeked?.();\n element.cancelVideoFrameCallback(cancel);\n }\n };\n};\nvar seekToTimeMultipleUntilRight = ({\n element,\n desiredTime,\n fps,\n logLevel,\n mountTime\n}) => {\n const threshold = 1 / fps / 2;\n let currentCancel = () => {\n return;\n };\n if (Number.isFinite(element.duration) && element.currentTime >= element.duration && desiredTime >= element.duration) {\n return {\n prom: Promise.resolve(),\n cancel: () => {}\n };\n }\n const prom = new Promise((resolve, reject) => {\n const firstSeek = seekToTime({\n element,\n desiredTime: desiredTime + threshold,\n logLevel,\n mountTime\n });\n firstSeek.wait.then((seekedTo) => {\n const difference = Math.abs(desiredTime - seekedTo);\n if (difference <= threshold) {\n return resolve();\n }\n const sign = desiredTime > seekedTo ? 
1 : -1;\n const newSeek = seekToTime({\n element,\n desiredTime: seekedTo + threshold * sign,\n logLevel,\n mountTime\n });\n currentCancel = newSeek.cancel;\n newSeek.wait.then((newTime) => {\n const newDifference = Math.abs(desiredTime - newTime);\n if (roundTo6Commas(newDifference) <= roundTo6Commas(threshold)) {\n return resolve();\n }\n const thirdSeek = seekToTime({\n element,\n desiredTime: desiredTime + threshold,\n logLevel,\n mountTime\n });\n currentCancel = thirdSeek.cancel;\n return thirdSeek.wait.then(() => {\n resolve();\n }).catch((err) => {\n reject(err);\n });\n }).catch((err) => {\n reject(err);\n });\n });\n currentCancel = firstSeek.cancel;\n });\n return {\n prom,\n cancel: () => {\n currentCancel();\n }\n };\n};\n\n// src/video/VideoForRendering.tsx\nimport { jsx as jsx34 } from \"react/jsx-runtime\";\nvar VideoForRenderingForwardFunction = ({\n onError,\n volume: volumeProp,\n allowAmplificationDuringRender,\n playbackRate,\n onDuration,\n toneFrequency,\n name,\n acceptableTimeShiftInSeconds,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n loopVolumeCurveBehavior,\n audioStreamIndex,\n onVideoFrame,\n ...props2\n}, ref) => {\n const absoluteFrame = useTimelinePosition();\n const frame = useCurrentFrame();\n const volumePropsFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? \"repeat\");\n const videoConfig = useUnsafeVideoConfig();\n const videoRef = useRef20(null);\n const sequenceContext = useContext35(SequenceContext);\n const mediaStartsAt = useMediaStartsAt();\n const environment = useRemotionEnvironment();\n const logLevel = useLogLevel();\n const mountTime = useMountTime();\n const { delayRender: delayRender2, continueRender: continueRender2 } = useDelayRender();\n const { registerRenderAsset, unregisterRenderAsset } = useContext35(RenderAssetManager);\n const id = useMemo35(() => `video-${random(props2.src ?? 
\"\")}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [\n props2.src,\n sequenceContext?.cumulatedFrom,\n sequenceContext?.relativeFrom,\n sequenceContext?.durationInFrames\n ]);\n if (!videoConfig) {\n throw new Error(\"No video config found\");\n }\n const volume = evaluateVolume({\n volume: volumeProp,\n frame: volumePropsFrame,\n mediaVolume: 1\n });\n warnAboutTooHighVolume(volume);\n useEffect17(() => {\n if (!props2.src) {\n throw new Error(\"No src passed\");\n }\n if (props2.muted) {\n return;\n }\n if (volume <= 0) {\n return;\n }\n if (!window.remotion_audioEnabled) {\n return;\n }\n registerRenderAsset({\n type: \"video\",\n src: getAbsoluteSrc(props2.src),\n id,\n frame: absoluteFrame,\n volume,\n mediaFrame: frame,\n playbackRate: playbackRate ?? 1,\n toneFrequency: toneFrequency ?? 1,\n audioStartFrame: Math.max(0, -(sequenceContext?.relativeFrom ?? 0)),\n audioStreamIndex: audioStreamIndex ?? 0\n });\n return () => unregisterRenderAsset(id);\n }, [\n props2.muted,\n props2.src,\n registerRenderAsset,\n id,\n unregisterRenderAsset,\n volume,\n frame,\n absoluteFrame,\n playbackRate,\n toneFrequency,\n sequenceContext?.relativeFrom,\n audioStreamIndex\n ]);\n useImperativeHandle10(ref, () => {\n return videoRef.current;\n }, []);\n useEffect17(() => {\n if (!window.remotion_videoEnabled) {\n return;\n }\n const { current } = videoRef;\n if (!current) {\n return;\n }\n const currentTime = getMediaTime({\n frame,\n playbackRate: playbackRate || 1,\n startFrom: -mediaStartsAt,\n fps: videoConfig.fps\n });\n const handle = delayRender2(`Rendering <Html5Video /> with src=\"${props2.src}\" at time ${currentTime}`, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? 
undefined\n });\n if (window.process?.env?.NODE_ENV === \"test\") {\n continueRender2(handle);\n return;\n }\n if (isApproximatelyTheSame(current.currentTime, currentTime)) {\n if (current.readyState >= 2) {\n continueRender2(handle);\n return;\n }\n const loadedDataHandler = () => {\n continueRender2(handle);\n };\n current.addEventListener(\"loadeddata\", loadedDataHandler, { once: true });\n return () => {\n current.removeEventListener(\"loadeddata\", loadedDataHandler);\n };\n }\n const endedHandler = () => {\n continueRender2(handle);\n };\n const seek2 = seekToTimeMultipleUntilRight({\n element: current,\n desiredTime: currentTime,\n fps: videoConfig.fps,\n logLevel,\n mountTime\n });\n seek2.prom.then(() => {\n continueRender2(handle);\n });\n current.addEventListener(\"ended\", endedHandler, { once: true });\n const errorHandler = () => {\n if (current?.error) {\n console.error(\"Error occurred in video\", current?.error);\n if (onError) {\n return;\n }\n throw new Error(`The browser threw an error while playing the video ${props2.src}: Code ${current.error.code} - ${current?.error?.message}. See https://remotion.dev/docs/media-playback-error for help. 
Pass an onError() prop to handle the error.`);\n } else {\n throw new Error(\"The browser threw an error\");\n }\n };\n current.addEventListener(\"error\", errorHandler, { once: true });\n return () => {\n seek2.cancel();\n current.removeEventListener(\"ended\", endedHandler);\n current.removeEventListener(\"error\", errorHandler);\n continueRender2(handle);\n };\n }, [\n volumePropsFrame,\n props2.src,\n playbackRate,\n videoConfig.fps,\n frame,\n mediaStartsAt,\n onError,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n logLevel,\n mountTime,\n continueRender2,\n delayRender2\n ]);\n const { src } = props2;\n if (environment.isRendering) {\n useLayoutEffect11(() => {\n if (window.process?.env?.NODE_ENV === \"test\") {\n return;\n }\n const newHandle = delayRender2(\"Loading <Html5Video> duration with src=\" + src, {\n retries: delayRenderRetries ?? undefined,\n timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined\n });\n const { current } = videoRef;\n const didLoad = () => {\n if (current?.duration) {\n onDuration(src, current.duration);\n }\n continueRender2(newHandle);\n };\n if (current?.duration) {\n onDuration(src, current.duration);\n continueRender2(newHandle);\n } else {\n current?.addEventListener(\"loadedmetadata\", didLoad, { once: true });\n }\n return () => {\n current?.removeEventListener(\"loadedmetadata\", didLoad);\n continueRender2(newHandle);\n };\n }, [\n src,\n onDuration,\n delayRenderRetries,\n delayRenderTimeoutInMilliseconds,\n continueRender2,\n delayRender2\n ]);\n }\n return /* @__PURE__ */ jsx34(\"video\", {\n ref: videoRef,\n disableRemotePlayback: true,\n ...props2\n });\n};\nvar VideoForRendering = forwardRef12(VideoForRenderingForwardFunction);\n\n// src/video/Video.tsx\nimport { jsx as jsx35 } from \"react/jsx-runtime\";\nvar VideoForwardingFunction = (props2, ref) => {\n const {\n startFrom,\n endAt,\n trimBefore,\n trimAfter,\n name,\n pauseWhenBuffering,\n stack,\n 
_remotionInternalNativeLoopPassed,\n showInTimeline,\n onAutoPlayError,\n ...otherProps\n } = props2;\n const { loop, ...propsOtherThanLoop } = props2;\n const { fps } = useVideoConfig();\n const environment = useRemotionEnvironment();\n if (environment.isClientSideRendering) {\n throw new Error(\"<Html5Video> is not supported in @remotion/web-renderer. Use <Video> from @remotion/media instead. See https://remotion.dev/docs/client-side-rendering/limitations\");\n }\n const { durations, setDurations } = useContext36(DurationsContext);\n if (typeof ref === \"string\") {\n throw new Error(\"string refs are not supported\");\n }\n if (typeof props2.src !== \"string\") {\n throw new TypeError(`The \\`<Html5Video>\\` tag requires a string for \\`src\\`, but got ${JSON.stringify(props2.src)} instead.`);\n }\n const preloadedSrc = usePreload(props2.src);\n const onDuration = useCallback17((src, durationInSeconds) => {\n setDurations({ type: \"got-duration\", durationInSeconds, src });\n }, [setDurations]);\n const onVideoFrame = useCallback17(() => {}, []);\n const durationFetched = durations[getAbsoluteSrc(preloadedSrc)] ?? durations[getAbsoluteSrc(props2.src)];\n validateMediaTrimProps({ startFrom, endAt, trimBefore, trimAfter });\n const { trimBeforeValue, trimAfterValue } = resolveTrimProps({\n startFrom,\n endAt,\n trimBefore,\n trimAfter\n });\n if (loop && durationFetched !== undefined) {\n if (!Number.isFinite(durationFetched)) {\n return /* @__PURE__ */ jsx35(Html5Video, {\n ...propsOtherThanLoop,\n ref,\n stack,\n _remotionInternalNativeLoopPassed: true\n });\n }\n const mediaDuration = durationFetched * fps;\n return /* @__PURE__ */ jsx35(Loop, {\n durationInFrames: calculateMediaDuration({\n trimAfter: trimAfterValue,\n mediaDurationInFrames: mediaDuration,\n playbackRate: props2.playbackRate ?? 
1,\n trimBefore: trimBeforeValue\n }),\n layout: \"none\",\n name,\n children: /* @__PURE__ */ jsx35(Html5Video, {\n ...propsOtherThanLoop,\n ref,\n stack,\n _remotionInternalNativeLoopPassed: true\n })\n });\n }\n if (typeof trimBeforeValue !== \"undefined\" || typeof trimAfterValue !== \"undefined\") {\n return /* @__PURE__ */ jsx35(Sequence, {\n layout: \"none\",\n from: 0 - (trimBeforeValue ?? 0),\n showInTimeline: false,\n durationInFrames: trimAfterValue === undefined ? undefined : trimAfterValue / (props2.playbackRate ?? 1),\n name,\n children: /* @__PURE__ */ jsx35(Html5Video, {\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n ...otherProps,\n ref,\n stack\n })\n });\n }\n validateMediaProps({ playbackRate: props2.playbackRate, volume: props2.volume }, \"Html5Video\");\n if (environment.isRendering) {\n return /* @__PURE__ */ jsx35(VideoForRendering, {\n onDuration,\n onVideoFrame: onVideoFrame ?? null,\n ...otherProps,\n ref\n });\n }\n return /* @__PURE__ */ jsx35(VideoForPreview, {\n onlyWarnForMediaSeekingError: false,\n ...otherProps,\n ref,\n onVideoFrame: null,\n pauseWhenBuffering: pauseWhenBuffering ?? false,\n onDuration,\n _remotionInternalStack: stack ?? null,\n _remotionInternalNativeLoopPassed: _remotionInternalNativeLoopPassed ?? false,\n showInTimeline: showInTimeline ?? true,\n onAutoPlayError: onAutoPlayError ?? 
undefined\n });\n};\nvar Html5Video = forwardRef13(VideoForwardingFunction);\naddSequenceStackTraces(Html5Video);\nvar Video = Html5Video;\n// src/index.ts\ncheckMultipleRemotionVersions();\nvar Experimental = {\n Clipper,\n Null,\n useIsPlayer\n};\nvar proxyObj = {};\nvar Config = new Proxy(proxyObj, {\n get(_, prop) {\n if (prop === \"Bundling\" || prop === \"Rendering\" || prop === \"Log\" || prop === \"Puppeteer\" || prop === \"Output\") {\n return Config;\n }\n return () => {\n console.warn(\"⚠️ The CLI configuration has been extracted from Remotion Core.\");\n console.warn(\"Update the import from the config file:\");\n console.warn();\n console.warn(\"- Delete:\");\n console.warn('import {Config} from \"remotion\";');\n console.warn(\"+ Replace:\");\n console.warn('import {Config} from \"@remotion/cli/config\";');\n console.warn();\n console.warn(\"For more information, see https://www.remotion.dev/docs/4-0-migration.\");\n process.exit(1);\n };\n }\n});\nSequence.displayName = \"Sequence\";\naddSequenceStackTraces(Sequence);\nexport {\n watchStaticFile,\n useVideoConfig,\n useRemotionEnvironment,\n useDelayRender,\n useCurrentScale,\n useCurrentFrame,\n useBufferState,\n staticFile,\n spring,\n registerRoot,\n random,\n prefetch,\n measureSpring,\n interpolateColors,\n interpolate,\n getStaticFiles,\n getRemotionEnvironment,\n getInputProps,\n delayRender,\n continueRender,\n cancelRender,\n Video,\n VERSION,\n Still,\n Series,\n Sequence,\n OffthreadVideo,\n Loop,\n Internals,\n Img,\n IFrame,\n Html5Video,\n Html5Audio,\n Freeze,\n FolderContext,\n Folder,\n Experimental,\n Easing,\n Config,\n Composition,\n Audio,\n Artifact,\n AnimatedImage,\n AbsoluteFill\n};\n","// src/interpolate.ts\nfunction interpolateFunction(input, inputRange, outputRange, options) {\n const { extrapolateLeft, extrapolateRight, easing } = options;\n let result = input;\n const [inputMin, inputMax] = inputRange;\n const [outputMin, outputMax] = outputRange;\n if (result < 
inputMin) {\n if (extrapolateLeft === \"identity\") {\n return result;\n }\n if (extrapolateLeft === \"clamp\") {\n result = inputMin;\n } else if (extrapolateLeft === \"wrap\") {\n const range = inputMax - inputMin;\n result = ((result - inputMin) % range + range) % range + inputMin;\n } else if (extrapolateLeft === \"extend\") {}\n }\n if (result > inputMax) {\n if (extrapolateRight === \"identity\") {\n return result;\n }\n if (extrapolateRight === \"clamp\") {\n result = inputMax;\n } else if (extrapolateRight === \"wrap\") {\n const range = inputMax - inputMin;\n result = ((result - inputMin) % range + range) % range + inputMin;\n } else if (extrapolateRight === \"extend\") {}\n }\n if (outputMin === outputMax) {\n return outputMin;\n }\n result = (result - inputMin) / (inputMax - inputMin);\n result = easing(result);\n result = result * (outputMax - outputMin) + outputMin;\n return result;\n}\nfunction findRange(input, inputRange) {\n let i;\n for (i = 1;i < inputRange.length - 1; ++i) {\n if (inputRange[i] >= input) {\n break;\n }\n }\n return i - 1;\n}\nfunction checkValidInputRange(arr) {\n for (let i = 1;i < arr.length; ++i) {\n if (!(arr[i] > arr[i - 1])) {\n throw new Error(`inputRange must be strictly monotonically increasing but got [${arr.join(\",\")}]`);\n }\n }\n}\nfunction checkInfiniteRange(name, arr) {\n if (arr.length < 2) {\n throw new Error(name + \" must have at least 2 elements\");\n }\n for (const element of arr) {\n if (typeof element !== \"number\") {\n throw new Error(`${name} must contain only numbers`);\n }\n if (!Number.isFinite(element)) {\n throw new Error(`${name} must contain only finite numbers, but got [${arr.join(\",\")}]`);\n }\n }\n}\nfunction interpolate(input, inputRange, outputRange, options) {\n if (typeof input === \"undefined\") {\n throw new Error(\"input can not be undefined\");\n }\n if (typeof inputRange === \"undefined\") {\n throw new Error(\"inputRange can not be undefined\");\n }\n if (typeof outputRange === 
\"undefined\") {\n throw new Error(\"outputRange can not be undefined\");\n }\n if (inputRange.length !== outputRange.length) {\n throw new Error(\"inputRange (\" + inputRange.length + \") and outputRange (\" + outputRange.length + \") must have the same length\");\n }\n checkInfiniteRange(\"inputRange\", inputRange);\n checkInfiniteRange(\"outputRange\", outputRange);\n checkValidInputRange(inputRange);\n const easing = options?.easing ?? ((num) => num);\n let extrapolateLeft = \"extend\";\n if (options?.extrapolateLeft !== undefined) {\n extrapolateLeft = options.extrapolateLeft;\n }\n let extrapolateRight = \"extend\";\n if (options?.extrapolateRight !== undefined) {\n extrapolateRight = options.extrapolateRight;\n }\n if (typeof input !== \"number\") {\n throw new TypeError(\"Cannot interpolate an input which is not a number\");\n }\n const range = findRange(input, inputRange);\n return interpolateFunction(input, [inputRange[range], inputRange[range + 1]], [outputRange[range], outputRange[range + 1]], {\n easing,\n extrapolateLeft,\n extrapolateRight\n });\n}\n// src/random.ts\nfunction mulberry32(a) {\n let t = a + 1831565813;\n t = Math.imul(t ^ t >>> 15, t | 1);\n t ^= t + Math.imul(t ^ t >>> 7, t | 61);\n return ((t ^ t >>> 14) >>> 0) / 4294967296;\n}\nfunction hashCode(str) {\n let i = 0;\n let chr = 0;\n let hash = 0;\n for (i = 0;i < str.length; i++) {\n chr = str.charCodeAt(i);\n hash = (hash << 5) - hash + chr;\n hash |= 0;\n }\n return hash;\n}\nvar random = (seed, dummy) => {\n if (dummy !== undefined) {\n throw new TypeError(\"random() takes only one argument\");\n }\n if (seed === null) {\n return Math.random();\n }\n if (typeof seed === \"string\") {\n return mulberry32(hashCode(seed));\n }\n if (typeof seed === \"number\") {\n return mulberry32(seed * 10000000000);\n }\n throw new Error(\"random() argument must be a number or a string\");\n};\n// src/truthy.ts\nfunction truthy(value) {\n return Boolean(value);\n}\n\n// src/delay-render.ts\nif 
(typeof window !== \"undefined\") {\n window.remotion_renderReady = false;\n if (!window.remotion_delayRenderTimeouts) {\n window.remotion_delayRenderTimeouts = {};\n }\n window.remotion_delayRenderHandles = [];\n}\nvar DELAY_RENDER_CALLSTACK_TOKEN = \"The delayRender was called:\";\nvar DELAY_RENDER_RETRIES_LEFT = \"Retries left: \";\nvar DELAY_RENDER_RETRY_TOKEN = \"- Rendering the frame will be retried.\";\nvar DELAY_RENDER_CLEAR_TOKEN = \"handle was cleared after\";\n\n// src/input-props-serialization.ts\nvar DATE_TOKEN = \"remotion-date:\";\nvar FILE_TOKEN = \"remotion-file:\";\nvar serializeJSONWithSpecialTypes = ({\n data,\n indent,\n staticBase\n}) => {\n let customDateUsed = false;\n let customFileUsed = false;\n let mapUsed = false;\n let setUsed = false;\n try {\n const serializedString = JSON.stringify(data, function(key, value) {\n const item = this[key];\n if (item instanceof Date) {\n customDateUsed = true;\n return `${DATE_TOKEN}${item.toISOString()}`;\n }\n if (item instanceof Map) {\n mapUsed = true;\n return value;\n }\n if (item instanceof Set) {\n setUsed = true;\n return value;\n }\n if (typeof item === \"string\" && staticBase !== null && item.startsWith(staticBase)) {\n customFileUsed = true;\n return `${FILE_TOKEN}${item.replace(staticBase + \"/\", \"\")}`;\n }\n return value;\n }, indent);\n return { serializedString, customDateUsed, customFileUsed, mapUsed, setUsed };\n } catch (err) {\n throw new Error(\"Could not serialize the passed input props to JSON: \" + err.message);\n }\n};\nvar deserializeJSONWithSpecialTypes = (data) => {\n return JSON.parse(data, (_, value) => {\n if (typeof value === \"string\" && value.startsWith(DATE_TOKEN)) {\n return new Date(value.replace(DATE_TOKEN, \"\"));\n }\n if (typeof value === \"string\" && value.startsWith(FILE_TOKEN)) {\n return `${window.remotion_staticBase}/${value.replace(FILE_TOKEN, \"\")}`;\n }\n return value;\n });\n};\n\n// src/interpolate-colors.ts\nvar NUMBER = 
\"[-+]?\\\\d*\\\\.?\\\\d+\";\nvar PERCENTAGE = NUMBER + \"%\";\nfunction call(...args) {\n return \"\\\\(\\\\s*(\" + args.join(\")\\\\s*,\\\\s*(\") + \")\\\\s*\\\\)\";\n}\nfunction getMatchers() {\n const cachedMatchers = {\n rgb: undefined,\n rgba: undefined,\n hsl: undefined,\n hsla: undefined,\n hex3: undefined,\n hex4: undefined,\n hex5: undefined,\n hex6: undefined,\n hex8: undefined\n };\n if (cachedMatchers.rgb === undefined) {\n cachedMatchers.rgb = new RegExp(\"rgb\" + call(NUMBER, NUMBER, NUMBER));\n cachedMatchers.rgba = new RegExp(\"rgba\" + call(NUMBER, NUMBER, NUMBER, NUMBER));\n cachedMatchers.hsl = new RegExp(\"hsl\" + call(NUMBER, PERCENTAGE, PERCENTAGE));\n cachedMatchers.hsla = new RegExp(\"hsla\" + call(NUMBER, PERCENTAGE, PERCENTAGE, NUMBER));\n cachedMatchers.hex3 = /^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/;\n cachedMatchers.hex4 = /^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/;\n cachedMatchers.hex6 = /^#([0-9a-fA-F]{6})$/;\n cachedMatchers.hex8 = /^#([0-9a-fA-F]{8})$/;\n }\n return cachedMatchers;\n}\nfunction hue2rgb(p, q, t) {\n if (t < 0) {\n t += 1;\n }\n if (t > 1) {\n t -= 1;\n }\n if (t < 1 / 6) {\n return p + (q - p) * 6 * t;\n }\n if (t < 1 / 2) {\n return q;\n }\n if (t < 2 / 3) {\n return p + (q - p) * (2 / 3 - t) * 6;\n }\n return p;\n}\nfunction hslToRgb(h, s, l) {\n const q = l < 0.5 ? 
l * (1 + s) : l + s - l * s;\n const p = 2 * l - q;\n const r = hue2rgb(p, q, h + 1 / 3);\n const g = hue2rgb(p, q, h);\n const b = hue2rgb(p, q, h - 1 / 3);\n return Math.round(r * 255) << 24 | Math.round(g * 255) << 16 | Math.round(b * 255) << 8;\n}\nfunction parse255(str) {\n const int = Number.parseInt(str, 10);\n if (int < 0) {\n return 0;\n }\n if (int > 255) {\n return 255;\n }\n return int;\n}\nfunction parse360(str) {\n const int = Number.parseFloat(str);\n return (int % 360 + 360) % 360 / 360;\n}\nfunction parse1(str) {\n const num = Number.parseFloat(str);\n if (num < 0) {\n return 0;\n }\n if (num > 1) {\n return 255;\n }\n return Math.round(num * 255);\n}\nfunction parsePercentage(str) {\n const int = Number.parseFloat(str);\n if (int < 0) {\n return 0;\n }\n if (int > 100) {\n return 1;\n }\n return int / 100;\n}\nvar colorNames = {\n transparent: 0,\n aliceblue: 4042850303,\n antiquewhite: 4209760255,\n aqua: 16777215,\n aquamarine: 2147472639,\n azure: 4043309055,\n beige: 4126530815,\n bisque: 4293182719,\n black: 255,\n blanchedalmond: 4293643775,\n blue: 65535,\n blueviolet: 2318131967,\n brown: 2771004159,\n burlywood: 3736635391,\n burntsienna: 3934150143,\n cadetblue: 1604231423,\n chartreuse: 2147418367,\n chocolate: 3530104575,\n coral: 4286533887,\n cornflowerblue: 1687547391,\n cornsilk: 4294499583,\n crimson: 3692313855,\n cyan: 16777215,\n darkblue: 35839,\n darkcyan: 9145343,\n darkgoldenrod: 3095792639,\n darkgray: 2846468607,\n darkgreen: 6553855,\n darkgrey: 2846468607,\n darkkhaki: 3182914559,\n darkmagenta: 2332068863,\n darkolivegreen: 1433087999,\n darkorange: 4287365375,\n darkorchid: 2570243327,\n darkred: 2332033279,\n darksalmon: 3918953215,\n darkseagreen: 2411499519,\n darkslateblue: 1211993087,\n darkslategray: 793726975,\n darkslategrey: 793726975,\n darkturquoise: 13554175,\n darkviolet: 2483082239,\n deeppink: 4279538687,\n deepskyblue: 12582911,\n dimgray: 1768516095,\n dimgrey: 1768516095,\n dodgerblue: 512819199,\n 
firebrick: 2988581631,\n floralwhite: 4294635775,\n forestgreen: 579543807,\n fuchsia: 4278255615,\n gainsboro: 3705462015,\n ghostwhite: 4177068031,\n gold: 4292280575,\n goldenrod: 3668254975,\n gray: 2155905279,\n green: 8388863,\n greenyellow: 2919182335,\n grey: 2155905279,\n honeydew: 4043305215,\n hotpink: 4285117695,\n indianred: 3445382399,\n indigo: 1258324735,\n ivory: 4294963455,\n khaki: 4041641215,\n lavender: 3873897215,\n lavenderblush: 4293981695,\n lawngreen: 2096890111,\n lemonchiffon: 4294626815,\n lightblue: 2916673279,\n lightcoral: 4034953471,\n lightcyan: 3774873599,\n lightgoldenrodyellow: 4210742015,\n lightgray: 3553874943,\n lightgreen: 2431553791,\n lightgrey: 3553874943,\n lightpink: 4290167295,\n lightsalmon: 4288707327,\n lightseagreen: 548580095,\n lightskyblue: 2278488831,\n lightslategray: 2005441023,\n lightslategrey: 2005441023,\n lightsteelblue: 2965692159,\n lightyellow: 4294959359,\n lime: 16711935,\n limegreen: 852308735,\n linen: 4210091775,\n magenta: 4278255615,\n maroon: 2147483903,\n mediumaquamarine: 1724754687,\n mediumblue: 52735,\n mediumorchid: 3126187007,\n mediumpurple: 2473647103,\n mediumseagreen: 1018393087,\n mediumslateblue: 2070474495,\n mediumspringgreen: 16423679,\n mediumturquoise: 1221709055,\n mediumvioletred: 3340076543,\n midnightblue: 421097727,\n mintcream: 4127193855,\n mistyrose: 4293190143,\n moccasin: 4293178879,\n navajowhite: 4292783615,\n navy: 33023,\n oldlace: 4260751103,\n olive: 2155872511,\n olivedrab: 1804477439,\n orange: 4289003775,\n orangered: 4282712319,\n orchid: 3664828159,\n palegoldenrod: 4008225535,\n palegreen: 2566625535,\n paleturquoise: 2951671551,\n palevioletred: 3681588223,\n papayawhip: 4293907967,\n peachpuff: 4292524543,\n peru: 3448061951,\n pink: 4290825215,\n plum: 3718307327,\n powderblue: 2967529215,\n purple: 2147516671,\n rebeccapurple: 1714657791,\n red: 4278190335,\n rosybrown: 3163525119,\n royalblue: 1097458175,\n saddlebrown: 2336560127,\n salmon: 
4202722047,\n sandybrown: 4104413439,\n seagreen: 780883967,\n seashell: 4294307583,\n sienna: 2689740287,\n silver: 3233857791,\n skyblue: 2278484991,\n slateblue: 1784335871,\n slategray: 1887473919,\n slategrey: 1887473919,\n snow: 4294638335,\n springgreen: 16744447,\n steelblue: 1182971135,\n tan: 3535047935,\n teal: 8421631,\n thistle: 3636451583,\n tomato: 4284696575,\n turquoise: 1088475391,\n violet: 4001558271,\n wheat: 4125012991,\n white: 4294967295,\n whitesmoke: 4126537215,\n yellow: 4294902015,\n yellowgreen: 2597139199\n};\nfunction normalizeColor(color) {\n const matchers = getMatchers();\n let match;\n if (matchers.hex6) {\n if (match = matchers.hex6.exec(color)) {\n return Number.parseInt(match[1] + \"ff\", 16) >>> 0;\n }\n }\n if (colorNames[color] !== undefined) {\n return colorNames[color];\n }\n if (matchers.rgb) {\n if (match = matchers.rgb.exec(color)) {\n return (parse255(match[1]) << 24 | parse255(match[2]) << 16 | parse255(match[3]) << 8 | 255) >>> 0;\n }\n }\n if (matchers.rgba) {\n if (match = matchers.rgba.exec(color)) {\n return (parse255(match[1]) << 24 | parse255(match[2]) << 16 | parse255(match[3]) << 8 | parse1(match[4])) >>> 0;\n }\n }\n if (matchers.hex3) {\n if (match = matchers.hex3.exec(color)) {\n return Number.parseInt(match[1] + match[1] + match[2] + match[2] + match[3] + match[3] + \"ff\", 16) >>> 0;\n }\n }\n if (matchers.hex8) {\n if (match = matchers.hex8.exec(color)) {\n return Number.parseInt(match[1], 16) >>> 0;\n }\n }\n if (matchers.hex4) {\n if (match = matchers.hex4.exec(color)) {\n return Number.parseInt(match[1] + match[1] + match[2] + match[2] + match[3] + match[3] + match[4] + match[4], 16) >>> 0;\n }\n }\n if (matchers.hsl) {\n if (match = matchers.hsl.exec(color)) {\n return (hslToRgb(parse360(match[1]), parsePercentage(match[2]), parsePercentage(match[3])) | 255) >>> 0;\n }\n }\n if (matchers.hsla) {\n if (match = matchers.hsla.exec(color)) {\n return (hslToRgb(parse360(match[1]), 
parsePercentage(match[2]), parsePercentage(match[3])) | parse1(match[4])) >>> 0;\n }\n }\n throw new Error(`invalid color string ${color} provided`);\n}\nfunction processColor(color) {\n const normalizedColor = normalizeColor(color);\n return (normalizedColor << 24 | normalizedColor >>> 8) >>> 0;\n}\n\n// src/prores-profile.ts\nvar proResProfileOptions = [\n \"4444-xq\",\n \"4444\",\n \"hq\",\n \"standard\",\n \"light\",\n \"proxy\"\n];\n\n// src/v5-flag.ts\nvar ENABLE_V5_BREAKING_CHANGES = false;\n\n// src/validate-frame.ts\nvar validateFrame = ({\n allowFloats,\n durationInFrames,\n frame\n}) => {\n if (typeof frame === \"undefined\") {\n throw new TypeError(`Argument missing for parameter \"frame\"`);\n }\n if (typeof frame !== \"number\") {\n throw new TypeError(`Argument passed for \"frame\" is not a number: ${frame}`);\n }\n if (!Number.isFinite(frame)) {\n throw new RangeError(`Frame ${frame} is not finite`);\n }\n if (frame % 1 !== 0 && !allowFloats) {\n throw new RangeError(`Argument for frame must be an integer, but got ${frame}`);\n }\n if (frame < 0 && frame < -durationInFrames) {\n throw new RangeError(`Cannot use frame ${frame}: Duration of composition is ${durationInFrames}, therefore the lowest frame that can be rendered is ${-durationInFrames}`);\n }\n if (frame > durationInFrames - 1) {\n throw new RangeError(`Cannot use frame ${frame}: Duration of composition is ${durationInFrames}, therefore the highest frame that can be rendered is ${durationInFrames - 1}`);\n }\n};\n\n// src/codec.ts\nvar validCodecs = [\n \"h264\",\n \"h265\",\n \"vp8\",\n \"vp9\",\n \"mp3\",\n \"aac\",\n \"wav\",\n \"prores\",\n \"h264-mkv\",\n \"h264-ts\",\n \"gif\"\n];\n\n// src/validation/validate-default-codec.ts\nfunction validateCodec(defaultCodec, location, name) {\n if (typeof defaultCodec === \"undefined\") {\n return;\n }\n if (typeof defaultCodec !== \"string\") {\n throw new TypeError(`The \"${name}\" prop ${location} must be a string, but you passed a value of 
type ${typeof defaultCodec}.`);\n }\n if (!validCodecs.includes(defaultCodec)) {\n throw new Error(`The \"${name}\" prop ${location} must be one of ${validCodecs.join(\", \")}, but you passed ${defaultCodec}.`);\n }\n}\n\n// src/validation/validate-default-props.ts\nvar validateDefaultAndInputProps = (defaultProps, name, compositionId) => {\n if (!defaultProps) {\n return;\n }\n if (typeof defaultProps !== \"object\") {\n throw new Error(`\"${name}\" must be an object, but you passed a value of type ${typeof defaultProps}`);\n }\n if (Array.isArray(defaultProps)) {\n throw new Error(`\"${name}\" must be an object, an array was passed ${compositionId ? `for composition \"${compositionId}\"` : \"\"}`);\n }\n};\n\n// src/validation/validate-dimensions.ts\nfunction validateDimension(amount, nameOfProp, location) {\n if (typeof amount !== \"number\") {\n throw new Error(`The \"${nameOfProp}\" prop ${location} must be a number, but you passed a value of type ${typeof amount}`);\n }\n if (isNaN(amount)) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must not be NaN, but is NaN.`);\n }\n if (!Number.isFinite(amount)) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be finite, but is ${amount}.`);\n }\n if (amount % 1 !== 0) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be an integer, but is ${amount}.`);\n }\n if (amount <= 0) {\n throw new TypeError(`The \"${nameOfProp}\" prop ${location} must be positive, but got ${amount}.`);\n }\n}\n\n// src/validation/validate-duration-in-frames.ts\nfunction validateDurationInFrames(durationInFrames, options) {\n const { allowFloats, component } = options;\n if (typeof durationInFrames === \"undefined\") {\n throw new Error(`The \"durationInFrames\" prop ${component} is missing.`);\n }\n if (typeof durationInFrames !== \"number\") {\n throw new Error(`The \"durationInFrames\" prop ${component} must be a number, but you passed a value of type ${typeof durationInFrames}`);\n 
}\n if (durationInFrames <= 0) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be positive, but got ${durationInFrames}.`);\n }\n if (!allowFloats && durationInFrames % 1 !== 0) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be an integer, but got ${durationInFrames}.`);\n }\n if (!Number.isFinite(durationInFrames)) {\n throw new TypeError(`The \"durationInFrames\" prop ${component} must be finite, but got ${durationInFrames}.`);\n }\n}\n\n// src/validation/validate-fps.ts\nfunction validateFps(fps, location, isGif) {\n if (typeof fps !== \"number\") {\n throw new Error(`\"fps\" must be a number, but you passed a value of type ${typeof fps} ${location}`);\n }\n if (!Number.isFinite(fps)) {\n throw new Error(`\"fps\" must be a finite, but you passed ${fps} ${location}`);\n }\n if (isNaN(fps)) {\n throw new Error(`\"fps\" must not be NaN, but got ${fps} ${location}`);\n }\n if (fps <= 0) {\n throw new TypeError(`\"fps\" must be positive, but got ${fps} ${location}`);\n }\n if (isGif && fps > 50) {\n throw new TypeError(`The FPS for a GIF cannot be higher than 50. 
Use the --every-nth-frame option to lower the FPS: https://remotion.dev/docs/render-as-gif`);\n }\n}\n\n// src/video/get-current-time.ts\nvar getExpectedMediaFrameUncorrected = ({\n frame,\n playbackRate,\n startFrom\n}) => {\n return interpolate(frame, [-1, startFrom, startFrom + 1], [-1, startFrom, startFrom + playbackRate]);\n};\n\n// src/absolute-src.ts\nvar getAbsoluteSrc = (relativeSrc) => {\n if (typeof window === \"undefined\") {\n return relativeSrc;\n }\n if (relativeSrc.startsWith(\"http://\") || relativeSrc.startsWith(\"https://\") || relativeSrc.startsWith(\"file://\") || relativeSrc.startsWith(\"blob:\") || relativeSrc.startsWith(\"data:\")) {\n return relativeSrc;\n }\n return new URL(relativeSrc, window.origin).href;\n};\n\n// src/video/offthread-video-source.ts\nvar getOffthreadVideoSource = ({\n src,\n transparent,\n currentTime,\n toneMapped\n}) => {\n return `http://localhost:${window.remotion_proxyPort}/proxy?src=${encodeURIComponent(getAbsoluteSrc(src))}&time=${encodeURIComponent(Math.max(0, currentTime))}&transparent=${String(transparent)}&toneMapped=${String(toneMapped)}`;\n};\n\n// src/no-react.ts\nvar NoReactInternals = {\n processColor,\n truthy,\n validateFps,\n validateDimension,\n validateDurationInFrames,\n validateDefaultAndInputProps,\n validateFrame,\n serializeJSONWithSpecialTypes,\n bundleName: \"bundle.js\",\n bundleMapName: \"bundle.js.map\",\n deserializeJSONWithSpecialTypes,\n DELAY_RENDER_CALLSTACK_TOKEN,\n DELAY_RENDER_RETRY_TOKEN,\n DELAY_RENDER_CLEAR_TOKEN,\n DELAY_RENDER_ATTEMPT_TOKEN: DELAY_RENDER_RETRIES_LEFT,\n getOffthreadVideoSource,\n getExpectedMediaFrameUncorrected,\n ENABLE_V5_BREAKING_CHANGES,\n MIN_NODE_VERSION: ENABLE_V5_BREAKING_CHANGES ? 18 : 16,\n MIN_BUN_VERSION: ENABLE_V5_BREAKING_CHANGES ? 
\"1.1.3\" : \"1.0.3\",\n colorNames,\n DATE_TOKEN,\n FILE_TOKEN,\n validateCodec,\n proResProfileOptions\n};\nexport {\n random,\n interpolate,\n NoReactInternals\n};\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n// expose the modules object (__webpack_modules__)\n__webpack_require__.m = __webpack_modules__;\n\n","// getDefaultExport function for compatibility with non-harmony modules\n__webpack_require__.n = (module) => {\n\tvar getter = module && module.__esModule ?\n\t\t() => (module['default']) :\n\t\t() => (module);\n\t__webpack_require__.d(getter, { a: getter });\n\treturn getter;\n};","var getProto = Object.getPrototypeOf ? 
(obj) => (Object.getPrototypeOf(obj)) : (obj) => (obj.__proto__);\nvar leafPrototypes;\n// create a fake namespace object\n// mode & 1: value is a module id, require it\n// mode & 2: merge all properties of value into the ns\n// mode & 4: return value when already ns object\n// mode & 16: return value when it's Promise-like\n// mode & 8|1: behave like require\n__webpack_require__.t = function(value, mode) {\n\tif(mode & 1) value = this(value);\n\tif(mode & 8) return value;\n\tif(typeof value === 'object' && value) {\n\t\tif((mode & 4) && value.__esModule) return value;\n\t\tif((mode & 16) && typeof value.then === 'function') return value;\n\t}\n\tvar ns = Object.create(null);\n\t__webpack_require__.r(ns);\n\tvar def = {};\n\tleafPrototypes = leafPrototypes || [null, getProto({}), getProto([]), getProto(getProto)];\n\tfor(var current = mode & 2 && value; (typeof current == 'object' || typeof current == 'function') && !~leafPrototypes.indexOf(current); current = getProto(current)) {\n\t\tObject.getOwnPropertyNames(current).forEach((key) => (def[key] = () => (value[key])));\n\t}\n\tdef['default'] = () => (value);\n\t__webpack_require__.d(ns, def);\n\treturn ns;\n};","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.f = {};\n// This file contains only the entry chunk.\n// The chunk loading function for additional chunks\n__webpack_require__.e = (chunkId) => {\n\treturn Promise.all(Object.keys(__webpack_require__.f).reduce((promises, key) => {\n\t\t__webpack_require__.f[key](chunkId, promises);\n\t\treturn promises;\n\t}, []));\n};","// This function allow to reference async chunks\n__webpack_require__.u = (chunkId) => {\n\t// return url for filenames based on 
template\n\treturn \"\" + chunkId + \".bundle.js\";\n};","__webpack_require__.g = (function() {\n\tif (typeof globalThis === 'object') return globalThis;\n\ttry {\n\t\treturn this || new Function('return this')();\n\t} catch (e) {\n\t\tif (typeof window === 'object') return window;\n\t}\n})();","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","var inProgress = {};\nvar dataWebpackPrefix = \"@veolab/discoverylab-templates:\";\n// loadScript function to load a script via script tag\n__webpack_require__.l = (url, done, key, chunkId) => {\n\tif(inProgress[url]) { inProgress[url].push(done); return; }\n\tvar script, needAttach;\n\tif(key !== undefined) {\n\t\tvar scripts = document.getElementsByTagName(\"script\");\n\t\tfor(var i = 0; i < scripts.length; i++) {\n\t\t\tvar s = scripts[i];\n\t\t\tif(s.getAttribute(\"src\") == url || s.getAttribute(\"data-webpack\") == dataWebpackPrefix + key) { script = s; break; }\n\t\t}\n\t}\n\tif(!script) {\n\t\tneedAttach = true;\n\t\tscript = document.createElement('script');\n\n\t\tscript.charset = 'utf-8';\n\t\tif (__webpack_require__.nc) {\n\t\t\tscript.setAttribute(\"nonce\", __webpack_require__.nc);\n\t\t}\n\t\tscript.setAttribute(\"data-webpack\", dataWebpackPrefix + key);\n\n\t\tscript.src = url;\n\t}\n\tinProgress[url] = [done];\n\tvar onScriptComplete = (prev, event) => {\n\t\t// avoid mem leaks in IE.\n\t\tscript.onerror = script.onload = null;\n\t\tclearTimeout(timeout);\n\t\tvar doneFns = inProgress[url];\n\t\tdelete inProgress[url];\n\t\tscript.parentNode && script.parentNode.removeChild(script);\n\t\tdoneFns && doneFns.forEach((fn) => (fn(event)));\n\t\tif(prev) return prev(event);\n\t}\n\tvar timeout = setTimeout(onScriptComplete.bind(null, undefined, { type: 'timeout', target: script }), 120000);\n\tscript.onerror = onScriptComplete.bind(null, script.onerror);\n\tscript.onload = onScriptComplete.bind(null, script.onload);\n\tneedAttach && 
document.head.appendChild(script);\n};","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","var scriptUrl;\nif (__webpack_require__.g.importScripts) scriptUrl = __webpack_require__.g.location + \"\";\nvar document = __webpack_require__.g.document;\nif (!scriptUrl && document) {\n\tif (document.currentScript && document.currentScript.tagName.toUpperCase() === 'SCRIPT')\n\t\tscriptUrl = document.currentScript.src;\n\tif (!scriptUrl) {\n\t\tvar scripts = document.getElementsByTagName(\"script\");\n\t\tif(scripts.length) {\n\t\t\tvar i = scripts.length - 1;\n\t\t\twhile (i > -1 && (!scriptUrl || !/^http(s?):/.test(scriptUrl))) scriptUrl = scripts[i--].src;\n\t\t}\n\t}\n}\n// When supporting browsers where an automatic publicPath is not supported you must specify an output.publicPath manually via configuration\n// or pass an empty string (\"\") and set the __webpack_public_path__ variable from your code to use your own logic.\nif (!scriptUrl) throw new Error(\"Automatic publicPath is not supported in this browser\");\nscriptUrl = scriptUrl.replace(/^blob:/, \"\").replace(/#.*$/, \"\").replace(/\\?.*$/, \"\").replace(/\\/[^\\/]+$/, \"/\");\n__webpack_require__.p = scriptUrl;","// no baseURI\n\n// object to store loaded and loading chunks\n// undefined = chunk not loaded, null = chunk preloaded/prefetched\n// [resolve, reject, Promise] = chunk loading, 0 = chunk loaded\nvar installedChunks = {\n\t792: 0\n};\n\n__webpack_require__.f.j = (chunkId, promises) => {\n\t\t// JSONP chunk loading for javascript\n\t\tvar installedChunkData = __webpack_require__.o(installedChunks, chunkId) ? 
installedChunks[chunkId] : undefined;\n\t\tif(installedChunkData !== 0) { // 0 means \"already installed\".\n\n\t\t\t// a Promise means \"currently loading\".\n\t\t\tif(installedChunkData) {\n\t\t\t\tpromises.push(installedChunkData[2]);\n\t\t\t} else {\n\t\t\t\tif(true) { // all chunks have JS\n\t\t\t\t\t// setup Promise in chunk cache\n\t\t\t\t\tvar promise = new Promise((resolve, reject) => (installedChunkData = installedChunks[chunkId] = [resolve, reject]));\n\t\t\t\t\tpromises.push(installedChunkData[2] = promise);\n\n\t\t\t\t\t// start chunk loading\n\t\t\t\t\tvar url = __webpack_require__.p + __webpack_require__.u(chunkId);\n\t\t\t\t\t// create error before stack unwound to get useful stacktrace later\n\t\t\t\t\tvar error = new Error();\n\t\t\t\t\tvar loadingEnded = (event) => {\n\t\t\t\t\t\tif(__webpack_require__.o(installedChunks, chunkId)) {\n\t\t\t\t\t\t\tinstalledChunkData = installedChunks[chunkId];\n\t\t\t\t\t\t\tif(installedChunkData !== 0) installedChunks[chunkId] = undefined;\n\t\t\t\t\t\t\tif(installedChunkData) {\n\t\t\t\t\t\t\t\tvar errorType = event && (event.type === 'load' ? 
'missing' : event.type);\n\t\t\t\t\t\t\t\tvar realSrc = event && event.target && event.target.src;\n\t\t\t\t\t\t\t\terror.message = 'Loading chunk ' + chunkId + ' failed.\\n(' + errorType + ': ' + realSrc + ')';\n\t\t\t\t\t\t\t\terror.name = 'ChunkLoadError';\n\t\t\t\t\t\t\t\terror.type = errorType;\n\t\t\t\t\t\t\t\terror.request = realSrc;\n\t\t\t\t\t\t\t\tinstalledChunkData[1](error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\t\t\t\t\t__webpack_require__.l(url, loadingEnded, \"chunk-\" + chunkId, chunkId);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n};\n\n// no prefetching\n\n// no preloaded\n\n// no HMR\n\n// no HMR manifest\n\n// no on chunks loaded\n\n// install a JSONP callback for chunk loading\nvar webpackJsonpCallback = (parentChunkLoadingFunction, data) => {\n\tvar [chunkIds, moreModules, runtime] = data;\n\t// add \"moreModules\" to the modules object,\n\t// then flag all \"chunkIds\" as loaded and fire callback\n\tvar moduleId, chunkId, i = 0;\n\tif(chunkIds.some((id) => (installedChunks[id] !== 0))) {\n\t\tfor(moduleId in moreModules) {\n\t\t\tif(__webpack_require__.o(moreModules, moduleId)) {\n\t\t\t\t__webpack_require__.m[moduleId] = moreModules[moduleId];\n\t\t\t}\n\t\t}\n\t\tif(runtime) var result = runtime(__webpack_require__);\n\t}\n\tif(parentChunkLoadingFunction) parentChunkLoadingFunction(data);\n\tfor(;i < chunkIds.length; i++) {\n\t\tchunkId = chunkIds[i];\n\t\tif(__webpack_require__.o(installedChunks, chunkId) && installedChunks[chunkId]) {\n\t\t\tinstalledChunks[chunkId][0]();\n\t\t}\n\t\tinstalledChunks[chunkId] = 0;\n\t}\n\n}\n\nvar chunkLoadingGlobal = self[\"webpackChunk_veolab_discoverylab_templates\"] = self[\"webpackChunk_veolab_discoverylab_templates\"] || [];\nchunkLoadingGlobal.forEach(webpackJsonpCallback.bind(null, 0));\nchunkLoadingGlobal.push = webpackJsonpCallback.bind(null, chunkLoadingGlobal.push.bind(chunkLoadingGlobal));","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules 
so it can't be inlined\n__webpack_require__(6507);\n__webpack_require__(4917);\n__webpack_require__(3610);\nvar __webpack_exports__ = __webpack_require__(3482);\n",""],"names":[],"sourceRoot":""}
|